def batch_work(args):
    """Run a batch verify/exploit job with a single PoC module.

    Args:
        args: parsed CLI namespace; reads METHOD ('verify'|'exploit'),
              PROXY, poc (path to a PoC module, or 'all'), targets
              (seed file of targets) and THREADS.

    Side effects: writes a 'batch_<method>_result_<poc>.txt' result file
    and logs a run summary. Exits the process on a bad METHOD.

    NOTE(review): the 'all' branch is an unimplemented stub here.
    """
    if args.METHOD not in ['verify', 'exploit']:
        logger.error('Error method, please check out...')
        # Bugfix: bare sys.exit() exits with status 0 (success) even
        # though this is an error path; exit non-zero instead.
        sys.exit(1)
    if args.PROXY:
        init_proxy(args.PROXY)
    if args.poc != 'all':
        poc = import_module_with_path(args.poc)
        logger.info('Batch startting with "%s"' %
                    ('verify' if args.METHOD == 'verify' else 'exploit'))
        start_time = time.time()
        bt = BatchTest(seed_file=args.targets,
                       funcs2run=(poc.__name__,
                                  (poc.MyPoc.verify
                                   if args.METHOD == 'verify'
                                   else poc.MyPoc.exploit)),
                       result_file='batch_%s_result_' % args.METHOD +
                                   os.path.splitext(
                                       os.path.basename(args.poc))[0] +
                                   '.txt',
                       thread_num=args.THREADS,
                       verbose=False)
        bt.start(norm_target_func=normalize_url)
        logger.info('total number: %d, success number: %d, failed number: %d' %
                    (bt.total_num, bt.success_num,
                     (bt.total_num - bt.success_num)))
        logger.info('cost %f seconds.' % (time.time() - start_time))
    else:
        # Add
        pass
def batch_work(args):
    """Run a batch verify/exploit job with one PoC module or all of them.

    Args:
        args: parsed CLI namespace; reads METHOD ('verify'|'exploit'),
              PROXY, poc (path to a PoC module, or 'all'), MODULE_DIR
              (PoC directory for the 'all' case), targets (seed file)
              and THREADS.

    Side effects: writes a 'batch_<method>_result_*.txt' result file and
    logs a run summary. Exits the process on a bad METHOD.
    """
    if args.METHOD not in ['verify', 'exploit']:
        logger.error('Error method, please check out...')
        # Bugfix: bare sys.exit() exits with status 0 (success) even
        # though this is an error path; exit non-zero instead.
        sys.exit(1)
    if args.PROXY:
        init_proxy(args.PROXY)

    use_verify = (args.METHOD == 'verify')

    if args.poc != 'all':
        # Single PoC: run its verify/exploit entry point alone.
        poc = import_module_with_path(args.poc)
        funcs = (poc.__name__,
                 (poc.MyPoc.verify if use_verify else poc.MyPoc.exploit))
        outfile = 'batch_%s_result_' % args.METHOD + os.path.splitext(
            os.path.basename(args.poc))[0] + '.txt'
        logger.info('Batch startting with "%s"' %
                    ('verify' if use_verify else 'exploit'))
        _run_batch(args, funcs, outfile)
    else:
        # All PoCs: import every module from the PoC directory
        # (MODULE_DIR when given, else the default module path).
        path = args.MODULE_DIR
        module_path = _default_module_path if not path else \
            os.path.expanduser(path)
        pocs = import_all_modules_with_dirname(module_path)
        funcs = [(poc.__name__,
                  poc.MyPoc.verify if use_verify else poc.MyPoc.exploit)
                 for poc in pocs]
        outfile = 'batch_%s_result_all' % args.METHOD + '.txt'
        logger.info('Batch all startting with "%s"' %
                    ('verify' if use_verify else 'exploit'))
        _run_batch(args, funcs, outfile)


def _run_batch(args, funcs, outfile):
    """Execute one timed BatchTest run and log the summary statistics.

    Extracted helper: the single-poc and all-poc branches previously
    duplicated this logic verbatim.
    """
    start_time = time.time()
    bt = BatchTest(seed_file=args.targets,
                   funcs2run=funcs,
                   result_file=outfile,
                   thread_num=args.THREADS,
                   verbose=False)
    bt.start(norm_target_func=normalize_url)
    logger.info('total number: %d, success number: %d, failed number: %d' %
                (bt.total_num, bt.success_num,
                 (bt.total_num - bt.success_num)))
    logger.info('cost %f seconds.' % (time.time() - start_time))
def fetch_work(args):
    """Fetch search-engine results for a query and hand them off.

    Args:
        args: parsed CLI namespace; reads PROXY, QUIET, OUTFILE and query.

    Side effects: optionally installs a proxy, runs a Google search, then
    passes the collected URLs to quit_process together with the output file.
    """
    if args.PROXY:
        init_proxy(args.PROXY)
    # Quiet mode suppresses the search engine's debug output.
    verbose = not args.QUIET
    engine = Google(debug_mode=verbose)
    found_urls = engine.fetch_results(args.query)
    quit_process(args.OUTFILE, found_urls)
def download_work(args): if args.PROXY: init_proxy(args.PROXY) cookie = args.COOKIE if args.COOKIE else None n_success = 0 n_fail = 0 if args.poc != 'all': poc_id = args.poc if not re.search(_ID_REGEX, poc_id): logger.error('Error format on poc id, please reinput.') else: if download_poc(poc_id, cookie): n_success += 1 else: n_fail += 1 else: logger.info('Download all pocs from "beebeeto.com"') logger.warning( 'PoC existed will be overwrite, type [Enter] to continue.') raw_input() if True: crawl_dic = {'http://beebeeto.com/pdb/?page=1': False} while False in crawl_dic.values(): crawl_url = choice([ link for link, crawled in crawl_dic.items() if not crawled ]) try: content = requests.get(crawl_url).content crawl_dic[crawl_url] = True except Exception, e: logger.error('Exception occured "%s" (%s)' % (Exception, e)) break if content: crawl_dic = parse_page_from_content(content, crawl_dic) ids = parse_poc_id_from_content(content) for poc_id in ids: if download_poc(poc_id, cookie): n_success += 1 else: n_fail += 1 else:
def download_work(args): if args.PROXY: init_proxy(args.PROXY) cookie = args.COOKIE if args.COOKIE else None n_success = 0 n_fail = 0 if args.poc != 'all': poc_id = args.poc if not re.search(_ID_REGEX, poc_id): logger.error('Error format on poc id, please reinput.') else: if download_poc(poc_id, cookie): n_success += 1 else: n_fail += 1 else: logger.info('Download all pocs from "beebeeto.com"') logger.warning('PoC existed will be overwrite, type [Enter] to continue.') raw_input() if True: crawl_dic = {'http://beebeeto.com/pdb/?page=1': False} while False in crawl_dic.values(): crawl_url = choice([link for link, crawled in crawl_dic.items() if not crawled]) try: content = requests.get(crawl_url).content crawl_dic[crawl_url] = True except Exception, e: logger.error('Exception occured "%s" (%s)' % (Exception, e)) break if content: crawl_dic = parse_page_from_content(content, crawl_dic) ids = parse_poc_id_from_content(content) for poc_id in ids: if download_poc(poc_id, cookie): n_success += 1 else: n_fail += 1 else: