Example #1
 def __init__(self):
     init()
     self.console_width = getTerminalSize()[0] - 2
     self.lock = threading.Lock()
     self.STOP_ME = False
     parse_args(self)
     load_proxy(self)
     self.gen_params_queue()
     parse_request(self)
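The constructor above calls a getTerminalSize() helper that is not shown. A minimal stand-in built on the standard library could look like the sketch below; the (columns, lines) return order matches how the constructor indexes the result with [0], and the fallback size is an assumption:

import shutil

def getTerminalSize():
    # Ask the OS for the terminal size, falling back to 80x24 when stdout is not a TTY.
    size = shutil.get_terminal_size(fallback=(80, 24))
    return size.columns, size.lines   # index [0] gives the column count, as used above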
Example #2
async def main():
    argv = parse_args()
    urls_file = argv.target
    queue_put(urls_file)
    async with httpx.AsyncClient(verify=False) as client:  # create one shared HTTP session
        tasks = []
        for _ in range(COROS_NUM):
            # wrap each coroutine in a Task; asyncio.wait() no longer accepts bare coroutines
            task = asyncio.create_task(judge_site_status(client, q))
            tasks.append(task)
        await asyncio.wait(tasks)
Example #3
 def __init__(self):
     self.cracked_count = 0
     self.start_time = time.time()
     self.lock = threading.Lock()
     self.STOP_ME = False
     self.args = parse_args()
     self.request_thread_count = self.args.t
     self.request_count = 0
     parse_request(self)
     load_proxy(self)
     self.gen_params_queue()
     if self.args.auth:
         self.auth_mode = 'Basic'  # default is basic auth
         do_request(self, auth_schema_test=True)
     elif self.args.check_proxy:
         pass
     else:
         do_request(self, request_test=True)
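The example above, like the others on this page, relies on a project-specific parse_args() helper. A minimal argparse sketch of such a helper is shown below; the option spellings are inferred from the attributes the example reads (args.t, args.auth, args.check_proxy), while the defaults and help texts are assumptions:

import argparse

def parse_args():
    # Hypothetical reconstruction; only the destination names are taken from the example above.
    parser = argparse.ArgumentParser(description='HTTP brute-force scanner')
    parser.add_argument('-t', type=int, default=10,
                        help='number of request threads (read as args.t)')
    parser.add_argument('--auth', action='store_true',
                        help='test HTTP authentication schemes (Basic by default)')
    parser.add_argument('--check_proxy', action='store_true',
                        help='only verify the loaded proxies')
    return parser.parse_args()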
Example #4
        threads = [gevent.spawn(self._scan, i) for i in range(self.options.threads)]
        gevent.joinall(threads)


def run_process(target, options, process_num, dns_servers, next_subs, scan_count, found_count, queue_size_list,
                tmp_dir, cdns):
    signal.signal(signal.SIGINT, user_abort)
    s = SubNameBrute(target=target, options=options, process_num=process_num,
                     dns_servers=dns_servers, cdns=cdns, next_subs=next_subs,
                     scan_count=scan_count, found_count=found_count, queue_size_list=queue_size_list,
                     tmp_dir=tmp_dir)
    s.run()


if __name__ == '__main__':
    options, args = parse_args()
    start_time = time.time()
    # make tmp dirs
    tmp_dir = 'tmp/%s_%s' % (args[0], int(time.time()))
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    multiprocessing.freeze_support()
    all_process = []
    dns_servers = load_dns_servers()
    cdns = load_cdn_domains()
    next_subs = load_next_sub(options)
    scan_count = multiprocessing.Value('i', 0)
    found_count = multiprocessing.Value('i', 0)
    queue_size_list = multiprocessing.Array('i', options.process)
Example #5
                print('[%s] Scan report saved to report/%s' % (get_time(), report_name))
                if args.browser:
                    webbrowser.open_new_tab(os.path.abspath('report/%s' % report_name))
            else:
                lock.acquire()
                print('[%s] No vulnerabilities found on sites in %s.' % (get_time(), file))
                lock.release()
        except IOError:
            sys.exit(-1)
        except Exception as e:
            print('[save_report_thread Exception] %s %s' % (type(e), str(e)))
            sys.exit(-1)


if __name__ == '__main__':
    args = parse_args()

    if args.f:
        input_files = [args.f]
    elif args.d:
        input_files = glob.glob(args.d + '/*.txt')
    elif args.crawler:
        input_files = ['crawler']
    elif args.host:
        input_files = ['hosts']    # several hosts on command line

    scanned_ips = []    # all scanned IPs in current scan

    for file in input_files:
        if args.host:
            lines = [' '.join(args.host)]
Example #6
def batch_scan(url, q_results, lock, threads_num, timeout):
    print('Scan', url)
    a = InfoDisScanner(url, lock, timeout * 60)
    host, results, severity = a.scan(threads=threads_num)
    if results:
        q_results.put((host, results, severity))
        for key in results:
            for item in results[key]:
                print('[+] [%s] %s' % (item['status'], item['url']))



if __name__ == '__main__':
    args = parse_args()
    if args.d:
        all_files = glob.glob(args.d + '/*.txt')
    elif args.f:
        all_files = [args.f]
    else:
        all_files = ['temp']    # several hosts on command line


    for file in all_files:
        start_time = time.time()
        if args.host:
            lines = [args.host]
        else:
            with open(file) as inFile:
                lines = inFile.readlines()
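The snippet above collects the target lines but is cut off before they are dispatched. Below is a minimal driver sketch, not taken from the original project: it assumes batch_scan() from the snippet above is importable, fans the lines out to a thread pool, and drains results from a shared queue. The import path and all defaults are assumptions.

import queue
import threading
from concurrent.futures import ThreadPoolExecutor

from scanner import batch_scan   # assumed import path for the function defined above

def scan_lines(lines, threads_num=10, timeout=10):
    q_results = queue.Queue()     # batch_scan() puts (host, results, severity) tuples here
    lock = threading.Lock()       # shared lock passed through to InfoDisScanner
    targets = [line.strip() for line in lines if line.strip()]
    with ThreadPoolExecutor(max_workers=threads_num) as pool:
        for url in targets:
            pool.submit(batch_scan, url, q_results, lock, threads_num, timeout)
    results = []
    while not q_results.empty():
        results.append(q_results.get())
    return results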
Example #7
import asyncio
import httpx
import time
from core.core import judge_path_status
from lib.queue_put import q, queue_put
from lib.cmdline import parse_args
from conf.config import COROS_NUM


async def main():
    '''
    Main function.
    :return:
    '''
    async with httpx.AsyncClient(verify=False) as client:  # create one shared HTTP session
        tasks = []
        for _ in range(COROS_NUM):
            # wrap each coroutine in a Task; asyncio.wait() no longer accepts bare coroutines
            task = asyncio.create_task(judge_path_status(client, q))
            tasks.append(task)
        await asyncio.wait(tasks)


if __name__ == '__main__':
    start_time = time.time()
    argv = parse_args()
    urls_file, dict_file, filename_dict = argv.target, argv.dirs, argv.filenames
    queue_put(urls_file, dict_file, filename_dict)
    asyncio.run(main())
    print(f'Cost time: {time.time() - start_time}')
Example #8
def run_process(target, options, process_num, dns_servers, next_subs,
                scan_count, found_count, queue_size_list, tmp_dir):
    signal.signal(signal.SIGINT, user_abort)
    s = SubNameBrute(target=target,
                     options=options,
                     process_num=process_num,
                     dns_servers=dns_servers,
                     next_subs=next_subs,
                     scan_count=scan_count,
                     found_count=found_count,
                     queue_size_list=queue_size_list,
                     tmp_dir=tmp_dir)
    s.run()


if __name__ == '__main__':
    options, args = parse_args()  # get the user arguments: options holds option values, args holds the domains
    # read the domain file if the -d option was given
    domain_list = []

    if options.domain is not None:
        file_name = options.domain
        with open(file_name, 'r') as f:
            domain_list = f.readlines()
    else:
        domain_list.append(args[0])

    start_time = time.time()
    # loop over the domains
    for domain in domain_list:
        # make tmp dirs
        tmp_dir = 'tmp/%s_%s' % (domain.strip(), int(time.time()))
Example #9
        threads = [gevent.spawn(self._scan, i) for i in range(self.options.threads)]
        gevent.joinall(threads)


def run_process(target, options, process_num, dns_servers, next_subs, scan_count, found_count, queue_size_list,
                tmp_dir):
    signal.signal(signal.SIGINT, user_abort)
    s = SubNameBrute(target=target, options=options, process_num=process_num,
                     dns_servers=dns_servers, next_subs=next_subs,
                     scan_count=scan_count, found_count=found_count, queue_size_list=queue_size_list,
                     tmp_dir=tmp_dir)
    s.run()


if __name__ == '__main__':
    options, args = parse_args()
    start_time = time.time()
    # make tmp dirs
    tmp_dir = 'tmp/%s_%s' % (args[0], int(time.time()))
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)

    multiprocessing.freeze_support()
    all_process = []
    dns_servers = load_dns_servers()
    next_subs = load_next_sub(options)
    scan_count = multiprocessing.Value('i', 0)
    found_count = multiprocessing.Value('i', 0)
    queue_size_list = multiprocessing.Array('i', options.process)

    try:
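The listing breaks off at the opening try:. Purely as an illustration (this is not the original code), the block typically continues by spawning one worker process per options.process that runs run_process() with the shared counters, then waits for them while letting Ctrl-C terminate the workers:

    # Sketch of how the truncated block might continue (assumed, not from the source):
    try:
        for process_num in range(options.process):
            p = multiprocessing.Process(
                target=run_process,
                args=(args[0], options, process_num, dns_servers, next_subs,
                      scan_count, found_count, queue_size_list, tmp_dir))
            all_process.append(p)
            p.start()
        for p in all_process:
            p.join()
    except KeyboardInterrupt:
        for p in all_process:
            p.terminate()
    print('%d found, %d scanned in %.1f seconds' % (
        found_count.value, scan_count.value, time.time() - start_time))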