Example #1
import settings
import database
import crawler

def it_method_scan_urls_return_add_urls_count_multi():
    # Multi-process scan against the test database; this path is expected
    # to return a value below zero rather than a positive URL count.
    settings.DB = settings.TEST_DATABASE
    settings.MULTI_PROCESS = True
    database.connect(settings.TEST_DATABASE)
    result = crawler.scan(max_limit=200)
    database.close()
    assert result < 0
Example #2
import settings
import database
import crawler

def it_method_scan_urls_return_add_urls_count_html():
    # Single-process scan against the test database; the fixture data is
    # expected to add exactly 51770 URLs.
    settings.DB = settings.TEST_DATABASE
    settings.MULTI_PROCESS = False
    database.connect(settings.TEST_DATABASE)
    result = crawler.scan(max_limit=50000)
    database.close()
    assert result == 51770
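
Both tests repeat the same database setup and teardown. With pytest this boilerplate could be factored into a fixture; a minimal sketch, assuming the same settings, database, and crawler modules, and assuming the runner is configured to collect the it_-prefixed functions (e.g. python_functions = it_* in pytest.ini):

import pytest
import settings
import database
import crawler

@pytest.fixture
def test_db():
    # Point the crawler at the test database, then close it after the test.
    settings.DB = settings.TEST_DATABASE
    database.connect(settings.TEST_DATABASE)
    yield
    database.close()

def it_method_scan_urls_return_add_urls_count_html(test_db):
    settings.MULTI_PROCESS = False
    assert crawler.scan(max_limit=50000) == 51770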
Example #3
def scan(message):
    # Expect "/scan <target>,<argument>": strip the command, then split
    # the remainder on the comma.
    try:
        arg = str(message.text)
        arg = arg.replace('/scan', '')
        arg = arg.split(',')
        result = crawler.scan(arg[0], arg[1])
        bot.reply_to(message, result)
    except Exception:
        bot.reply_to(message,
                     "Invalid syntax!\nUse /scanport ip, arguments")
Example #4
#!/usr/bin/python3

import logging
import settings
import crawler

if __name__ == '__main__':
    # Log to the file configured in settings, then run a small scan.
    logging.basicConfig(filename=settings.LOG_FILE,
                        level=logging.DEBUG,
                        format='%(asctime)s %(message)s')
    crawler.scan(max_limit=10)
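
Examples #1, #2, and #4 all read configuration from a settings module that is not shown on this page. A minimal hypothetical sketch; only the attribute names come from the examples, every value is an assumption:

# settings.py -- hypothetical values; only the names are taken from the examples.
DB = 'crawler.db'          # active database; the tests swap in TEST_DATABASE
TEST_DATABASE = 'test.db'  # database used by Examples #1 and #2
MULTI_PROCESS = False      # multiprocessing toggle read by crawler.scan
LOG_FILE = 'crawler.log'   # log destination used in Example #4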
Example #5
import crawler
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--scan', help='perform a scan', action='store_true')
parser.add_argument('--infile', help='set a custom domain input file location')
parser.add_argument('--outfile', help='set a custom output log file location')
args = parser.parse_args()

if args.scan:
    stack = crawler.read_infile()

    # read_infile() returns a list of domains on success; pop them one at
    # a time, scan each, and log the result.
    if isinstance(stack, list):
        while stack:
            domain = stack.pop().strip()
            scan_results = crawler.scan(domain)
            log_status = crawler.write_outfile(scan_results)
            print(scan_results)
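
read_infile() and write_outfile() are project helpers not shown here. A minimal sketch of what they might look like, assuming a plain-text domain list and an append-only log; the default file names are borrowed from Example #6's argparse defaults, and the return conventions are assumptions:

def read_infile(path='scanner_domains.txt'):
    """Return the list of domains, or None if the file can't be read."""
    try:
        with open(path) as f:
            return f.readlines()
    except OSError:
        return None


def write_outfile(results, path='scanner_log.txt'):
    """Append one result per line; report whether the write succeeded."""
    try:
        with open(path, 'a') as f:
            f.write(str(results) + '\n')
        return True
    except OSError:
        return False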
Example #6
import crawler
import argparse
import logging
import warnings

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--infile', help='set a custom domain input file location',
                        default='scanner_domains.txt', metavar='FILE')
    parser.add_argument('-o', '--outfile', help='set a custom output log file location',
                        default='scanner_log.txt', metavar='FILE')
    # Note: the default signature is the string 'None', not the None object.
    parser.add_argument('--sig', help='provide a signature to scan each target for',
                        action='store', default='None', metavar='TEXT')
    parser.add_argument('-d', '--debug', help='enable debugging output to the console',
                        action='store_true')
    args = parser.parse_args()

    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
        warnings.warn('Debug logging enabled ...')

    crawler.scan(infile=args.infile, outfile=args.outfile, signature=args.sig)
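
Because argparse parsers accept an explicit argv list, the flag handling above can be exercised without a real command line. A small self-contained check; the sample values are arbitrary:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-i', '--infile', default='scanner_domains.txt', metavar='FILE')
parser.add_argument('-o', '--outfile', default='scanner_log.txt', metavar='FILE')
parser.add_argument('--sig', action='store', default='None', metavar='TEXT')
parser.add_argument('-d', '--debug', action='store_true')

args = parser.parse_args(['-i', 'domains.txt', '--sig', 'eval(', '-d'])
assert args.debug
assert args.infile == 'domains.txt'
assert args.outfile == 'scanner_log.txt'  # default kept
assert args.sig == 'eval('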