def test(self):
    # Run a bound method through the pool and verify that the pool keeps
    # no stray references to the callable, its arguments, or its result.
    pool = self.pool = ThreadPool(2)
    refs = []
    obj = SomeClass()
    obj.refs = refs
    func = obj.func
    del obj
    with greentest.disabled_gc():
        # Equivalent to calling: func(Object(), kwarg1=Object()),
        # but dispatched via the thread pool.
        result = pool.apply(func, (Object(), ), {'kwarg1': Object()})
        assert isinstance(result, Object), repr(result)
        gevent.sleep(0.1)  # XXX should not be needed
        refs.append(weakref.ref(func))
        del func, result
        if PYPY:
            gc.collect()
            gc.collect()
        for index, r in enumerate(refs):
            # Every weakref must be dead once the strong refs are dropped.
            assert r() is None, (index, r(), greentest.getrefcount(r()), refs)
        assert len(refs) == 4, refs
def __init__(self, *args, **kwargs):
    # Extract our own options before handing the rest to the base class.
    self.log = kwargs.pop('log', logging)
    self.pool_size = kwargs.pop('pool_size', self.pool_size)
    super(CommandServer, self).__init__(*args, **kwargs)
    # Threshold for warning: two thirds of the worker pool in use.
    self.pool_size_warning = int(self.pool_size / 3.0 * 2.0)
    self.pool = ThreadPool(self.pool_size)
    self.clients = set()
def __init__(self):
    # Worker pool plus the mutable state shared by the workers.
    self.pool = ThreadPool(10)
    self.result = []
    self.port = "5984"
    self.q = []
    self.randomstrs = ['a', 'k', 'b', 'v', 'd', 'f', 'e', 'g']
    self.path = '_utils/index.html'
def __init__(self, callbacks, args, **kwargs):
    """Constructor.

    :param callbacks: Callbacks for registered action handlers.
    :type callbacks: dict
    :param args: CLI arguments.
    :type args: dict
    :param error_callback: Callback to use when errors occur.
    :type error_callback: function
    :param source_file: Full path to component source file.
    :type source_file: str
    """
    self.__args = args
    self.__socket = None
    self.__schema_registry = get_schema_registry()
    # Oversubscribe the pool: five worker threads per CPU core.
    self._pool = ThreadPool(cpu_count() * 5)
    self.callbacks = callbacks
    self.error_callback = kwargs.get('error_callback')
    self.source_file = kwargs.get('source_file')
    self.context = None
    self.poller = None
def __init__(self, k8s_config=None, k8s_namespace=None):
    from kubernetes import config, client
    from gevent.threadpool import ThreadPool

    # Resolve the API configuration: explicit arg > env address > in-cluster.
    if k8s_config is not None:
        self._k8s_config = k8s_config
    elif os.environ.get('KUBE_API_ADDRESS'):
        self._k8s_config = client.Configuration()
        self._k8s_config.host = os.environ['KUBE_API_ADDRESS']
    else:
        self._k8s_config = config.load_incluster_config()

    # KUBE_VERIFY_SSL may arrive quoted (e.g. '"0"'), hence the strip.
    verify_ssl = bool(int(os.environ.get('KUBE_VERIFY_SSL', '1').strip('"')))
    if not verify_ssl:
        c = client.Configuration()
        c.verify_ssl = False
        client.Configuration.set_default(c)

    self._k8s_namespace = (
        k8s_namespace or os.environ.get('MARS_K8S_POD_NAMESPACE') or 'default')
    self._full_label_selector = None
    self._client = client.CoreV1Api(client.ApiClient(self._k8s_config))
    self._pool = ThreadPool(1)
    self._service_pod_to_ep = dict()
def __init__(self, host, port=80, threads=1000):
    """Prepare a slow-POST payload and a worker pool against host:port.

    :param host: target host name or address.
    :param port: target TCP port (default 80).
    :param threads: size of the gevent thread pool.
    """
    self.host = host
    self.port = port
    self.pool = ThreadPool(threads)
    # BUG FIX: the adjacent string literals were concatenated with no
    # separator, yielding a single invalid header line. HTTP/1.1 header
    # lines must each be terminated by CRLF (RFC 7230).
    self.payload = ('POST / HTTP/1.1\r\n'
                    'Connection: keep-alive\r\n'
                    'Content-Length: 999999999999999999999999999999\r\n')
def test(self):
    # Apply a bound method via the pool; afterwards the callable, its
    # arguments and its result must all be garbage-collectible.
    pool = self.pool = ThreadPool(2)
    refs = []
    obj = SomeClass()
    obj.refs = refs
    func = obj.func
    del obj
    with disabled_gc():
        # Equivalent to: result = func(Object(), kwarg1=Object()),
        # but executed in the thread pool.
        result = pool.apply(func, (Object(), ), {'kwarg1': Object()})
        assert isinstance(result, Object), repr(result)
        gevent.sleep(0.1)  # XXX should not be needed
        refs.append(weakref.ref(func))
        del func, result
        if PYPY:
            gc.collect()
            gc.collect()
        for r in refs:
            self.assertIsNone(r())
        self.assertEqual(4, len(refs))
def __init__(self):
    # Plugin registry and the concurrency primitives shared by plugins.
    self.plugins = []
    self.counter = 0
    self.pool = variablesizepool.VariableSizePool(1)
    self.group = Group()
    # At most two concurrent disk-heavy operations.
    self.hddsem = Semaphore(2)
    self.threadpool = ThreadPool(2)
def __init__(self, proxies=None, upgrade_interval_day='7d', http_timeout=15):
    """
    :param proxies: optional proxy configuration passed through to requests.
    :param upgrade_interval_day: refresh interval, e.g. '7d' or '12h'.
    :param http_timeout: HTTP timeout in seconds.
    """
    self.http_timeout = int(http_timeout)
    self.cve_path = paths.CVE_PATH
    self.cve_cpe_db = paths.DB_FILE
    self.cpe_file = os.path.join(self.cve_path, 'nvdcpematch-1.0.json')
    # Parse '<number><unit>' where unit is 'd' (days) or 'h' (hours);
    # anything else falls back to one week.
    interval_type = re.search(r'(\d+)(\w)', upgrade_interval_day)
    if interval_type and interval_type.group(2) in ('d', 'h'):
        if interval_type.group(2) == 'd':
            self.upgrade_interval = 60 * 60 * 24 * int(interval_type.group(1))
        elif interval_type.group(2) == 'h':
            self.upgrade_interval = 60 * 60 * int(interval_type.group(1))
    else:
        self.upgrade_interval = 60 * 60 * 24 * 7
    self.headers = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": "en;q=0.9",
        "connection": "keep-alive",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108"
    }
    # Project-configured headers override the defaults above.
    self.headers.update(conf['http']['headers'])
    self.pool = ThreadPool(10)
    logger.info('Proxies: {0}'.format(proxies))
    self.proxies = proxies
def start_blacklist(self, project):
    """Run the rule-based blacklist scan over every file in the project.

    Spawns one ``__scan`` job per file on a thread pool and waits for all
    of them before returning.

    :param project: project object providing scan_path, logger, task_id, etc.
    :return: None; findings are recorded in ``kb.result[project.key]``.
    """
    def __scan(file_path):
        # Check a single file against every blacklist rule.
        for rule in self.blacklist_rule:
            # `_` receives the matched content when the rule fires.
            flag, _ = rule.verify(reference_value=file_path)
            project.logger.debug(
                "[RuleScanner] rule: [{0}], file: [{1}]".format(
                    rule, file_path))
            if flag:
                relative_path = file_path.replace(project.scan_path, "")
                project.logger.info(
                    "[RuleScanner] [Blacklist] [+] Found '{0}' vulnerability in '{1}' file."
                    .format(rule.name, relative_path))
                info = rule.get_info(match_content=_, origin_file=file_path)
                # Build a deep-link to the offending line when a web UI exists.
                if project.web_url:
                    report = '{0}/blob/{1}{2}#L{3}'.format(
                        project.web_url, project.branch, relative_path,
                        info['start_line'])
                else:
                    report = ''
                author, author_email = get_git_author(
                    project.get_last_author(file_path))
                vuln = Vulnerability(
                    task_id=project.task_id,
                    rule_key=rule.key,
                    risk_id=rule.risk_id,
                    title=rule.name,
                    file=relative_path,
                    author=author,
                    author_email=author_email,
                    hash=project.get_last_commit(),
                    start_line=info['start_line'],
                    end_line=info['end_line'],
                    report=report,
                    code_example=info['code_example'],
                    evidence_content=_,
                    engine=self.key,
                )
                # Deduplicate by (file, rule, line) before recording.
                vuln_key = hash_md5('{0}_{1}_{2}'.format(
                    file_path, rule.id, info['start_line']))
                if vuln_key not in kb.result[project.key]:
                    kb.result[project.key][vuln_key] = vuln.info
                    project.logger.debug('[RuleScanner] {0}'.format(vuln))

    project.logger.info(
        "[RuleScanner] Begin to perform rule-based blacklist vulnerability analysis..."
    )
    # One greenlet per file; pool size is configurable per project.
    pool = ThreadPool(project.threads or 20)
    for fpath, dirs, fs in os.walk(project.scan_path):
        for f in fs:
            pool.spawn(__scan, os.path.join(fpath, f))
    # Block until every spawned scan has finished.
    gevent.wait()
    project.logger.info("[RuleScanner] Rule blacklist scan completed.")
def start(self):
    # Fetch pages in batches of 10 offsets until asked to stop.
    offset = 1
    time.sleep(1)
    pool = ThreadPool(THREAD_NUM)
    while self.run:
        batch = [offset + i for i in range(10)]
        pool.map(self.get_a_list, batch)
        offset += 10
def __init__(self, server):
    self._server = server
    # Oversubscribe: four worker threads per CPU.
    self._pool = ThreadPool(CPUS * 4)
    self._listen_sock = None
    self._wsgi_server = None
    BaseEngine.__init__(self, server)
    Signaler.__init__(self)
def easy_parallelize_gevent(f, sequence):
    """Map *f* over *sequence* on a cached gevent thread pool.

    The pool is created lazily on first use and memoized in
    ``PARALLEL_STRUCTURES`` so subsequent calls reuse it.

    :param f: callable applied to each element.
    :param sequence: iterable of inputs.
    :return: list of results, in input order.
    """
    # Idiom fix: use `not in` rather than `not ... in`.
    if "gevent_pool" not in PARALLEL_STRUCTURES:
        from gevent.threadpool import ThreadPool
        PARALLEL_STRUCTURES["gevent_pool"] = ThreadPool(30000)
    pool = PARALLEL_STRUCTURES["gevent_pool"]
    return pool.map(f, sequence)
def targets(self, activity):
    # Resolve the activity's target contact ids to sort names, fetching
    # each contact concurrently on a thread pool.
    activities = self.get_contacts_by_activity[activity['id']]
    contact_ids = [int(c) for c in activities[TARGETS]]
    pool = ThreadPool(THREADS)
    greenlets = [pool.spawn(self.get_contact, cid) for cid in contact_ids]
    gevent.wait()
    names = [g.get()['sort_name'] for g in greenlets]
    return ', '.join(names)
def test_apply_raises(self):
    # apply() must propagate exceptions raised in the worker thread.
    self.pool = pool = ThreadPool(1)

    def raiser():
        raise ExpectedException()

    with self.assertRaises(ExpectedException):
        pool.apply(raiser)
def test_unordered(self):
    # imap_unordered must surface an exception raised by the source iterator.
    self.pool = ThreadPool(3)

    def unordered():
        return list(self.pool.imap_unordered(lambda x: None, error_iter()))

    self.assertRaises(greentest.ExpectedException, unordered)
    gevent.sleep(0.001)
def __init__(self, info):
    # Worker pool and paging for the search-engine scrapers.
    self.pool = ThreadPool(30)
    self.page = 80
    self.headers = {
        'Connection': 'close',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch, br',
        'Accept-Language': 'zh-CN,zh;q=0.8',
    }
    # Accumulated findings and their output files.
    self.urls_result = set()
    self.ips_result = set()
    self.ips_filename = "IP.txt"
    self.urls_filename = "URL.txt"
    self.proxy = {
        'http': 'http://127.0.0.1:1080',
        'https': 'http://127.0.0.1:1080'
    }
    # NOTE(review): API keys/credentials are hardcoded below; they should
    # be moved to configuration or environment variables.
    self.shodan_url = "https://api.shodan.io/shodan/host/search?query=apache&key=MM72AkzHXdHpC8iP65VVEEVrJjp7zkgd"
    self.shodan_token = "XHSWncMjN6MEyekECTMcOeoEocl6VO2q"
    self.shodan_keyword = info['shodan']
    self.censys_url = "https://censys.io/api/v1/search/ipv4"
    self.censys_api_id = "9b611dbd-366b-41b1-a50e-1a024004609f"
    self.censys_secret = "wAUW4Ax9uyCkD7JrgS1ItJE5nHQD5DnR"
    self.censys_keyword = info['censys']
    self.fofa_email = "*****@*****.**"
    self.fofa_token = "xxxx"
    self.fofa_keyword = info['fofa']
    self.zoomeye_url = "https://api.zoomeye.org/host/search?page={}&query={}"
    self.zoomeye_user = "******"
    self.zoomeye_pass = "******"
    self.zoomeye_keyword = info['zoomeye']
    self.zoomeye_pool = ThreadPool(10)
    self.baidu_url = "http://www.baidu.com/s?wd={}&pn={}0"
    self._360_url = "https://www.so.com/s?q={}&pn={}&fr=so.com"
    self.google_url = "https://www.google.com/search?q={}&safe=strict&start={}"
    self.keyword = info['b3g']
def invalid():
    tp = ThreadPool(10)
    # It's invalid to use callback.
    greenlets = [tp.apply_async(foo, args=(i, ), kwds=None, callback=callback_)
                 for i in xrange(1, 300)]
    gevent.joinall(greenlets)
def test_setzero(self):
    # Shrinking maxsize to 0 should eventually drain all worker threads.
    pool = self.pool = ThreadPool(3)
    pool.spawn(sleep, 0.1)
    pool.spawn(sleep, 0.2)
    pool.spawn(sleep, 0.3)
    gevent.sleep(0.2)
    self.assertEqual(pool.size, 3)
    pool.maxsize = 0
    gevent.sleep(0.2)
    # Race-tolerant check: workers exit asynchronously.
    self.assertEqualFlakyRaceCondition(pool.size, 0)
def __init__(self):
    """Load configuration and set up the GitLab API client and pool."""
    conf = get_config()
    if not conf:
        raise Exception(u'配置文件读取失败!')
    self.api = GitlabAPIHandler(api_url=conf['gitlab']['api_url'],
                                token=conf['gitlab']['token'])
    # Fall back to 12 months when activity_month is unset/zero.
    self.activity_limit_month = conf['gitlab']['activity_month'] or 12
    self.tz = timezone('Asia/Shanghai')
    self.pool = ThreadPool(20)
def _admin_init(self):
    # Load the collection directory first, then warm the WARC inputs.
    if self.coll_dir:
        self.load_coll_dir()
    if self.inputs:
        # A valid cache makes the background load unnecessary.
        if self.load_cache():
            return
        pool = ThreadPool(maxsize=1)
        pool.spawn(self.safe_auto_load_warcs)
def test_setzero(self):
    # Setting maxsize to 0 must shut down all worker threads.
    pool = self.pool = ThreadPool(3)
    pool.spawn(sleep, 0.001)
    pool.spawn(sleep, 0.002)
    pool.spawn(sleep, 0.003)
    gevent.sleep(0.001)
    self.assertEqual(pool.size, 3)
    pool.maxsize = 0
    gevent.sleep(0.01)
    self.assertEqual(pool.size, 0)
def test_inc(self):
    # With maxsize 0 nothing runs; raising maxsize to 1 releases both tasks.
    self.pool = ThreadPool(0)
    completed = []
    gevent.spawn(self.pool.spawn, completed.append, 1)
    gevent.spawn_later(0.0001, self.pool.spawn, completed.append, 2)
    gevent.sleep(0.01)
    self.assertEqual(completed, [])
    self.pool.maxsize = 1
    gevent.sleep(0.01)
    self.assertEqual(completed, [1, 2])
def __init__(self, baseurl, threads=1, timeout=10, sleep=5):
    # Target and tuning knobs.
    self.baseurl = baseurl
    self.threads = threads
    self.timeout = timeout
    self.sleep = sleep
    # Worker pool plus shared work/bookkeeping structures.
    self.pool = ThreadPool(self.threads)
    self.Queue = queue.Queue()
    self.block = set()
    self.flag = 0
    self.isstop = False
def bench_apply(loops):
    # Time `loops * N` synchronous apply() calls on a single-thread pool.
    pool = ThreadPool(1)
    start = perf.perf_counter()
    for _ in xrange(loops):
        for _ in xrange(N):
            pool.apply(noop)
    pool.join()
    pool.kill()
    return perf.perf_counter() - start
def __init__(self, **kwargs):
    """Initialize rule containers, grep helper and the worker pool."""
    super().__init__(**kwargs)
    self.component_rule = {}
    self.whitelist_rule = {}
    self.blacklist_rule = {}
    self._grep = GrepCMD()
    self._key = 1
    self.sonarqube = None
    self.pool = ThreadPool(20)
def _get_messages(self):
    # Emulate batch fetching by polling the queue backend `batchsize` times
    # concurrently on a thread pool.
    pool = ThreadPool(settings.POLLER_CONFIG['batchsize'])
    for i in range(settings.POLLER_CONFIG['batchsize']):
        if settings.QUEUE_TYPE in ['SQS', 'sqs']:
            pool.spawn(self._get_sqs_messages)
        elif settings.QUEUE_TYPE in ['RABBITMQ', 'rabbitmq']:
            pool.spawn(self._get_rabbitmq_messages, i)
        else:
            raise ValueError('Incorrect value "%s" for QUEUE_TYPE in %s'
                             % (settings.QUEUE_TYPE, settings.SETTINGS_MODULE))
def main():
    # Stream every document (selected fields only) and process each one
    # on a large thread pool, waiting for all greenlets to finish.
    all_content = collection.find(
        {},
        {'uid': 1, 'title_html': 1, 'content_html': 1},
        no_cursor_timeout=True)
    pool = ThreadPool(200)
    jobs = [pool.spawn(iteration, doc) for doc in all_content]
    gevent.joinall(jobs)
def test_apply_raises(self):
    """apply() must propagate exceptions raised by the target function."""
    self.pool = pool = ThreadPool(1)

    def raiser():
        raise ExpectedException()

    # assertRaises is clearer and stricter than try/except/else + fail(),
    # and matches the style of the sibling test.
    with self.assertRaises(ExpectedException):
        pool.apply(raiser)
def test_inc(self):
    # Nothing may run while maxsize is 0; bumping it to 1 releases both.
    self.pool = ThreadPool(0)
    completed = []
    # Try to be careful not to tick over the libuv timer.
    # See libuv/loop.py:_start_callback_timer
    gevent.spawn(self.pool.spawn, completed.append, 1)
    gevent.spawn_later(0.01, self.pool.spawn, completed.append, 2)
    gevent.sleep(0.02)
    self.assertEqual(completed, [])
    self.pool.maxsize = 1
    gevent.sleep(0.02)
    self.assertEqualFlakyRaceCondition(completed, [1, 2])