def _init_futures(self):
    """Build futures for results and output; hook up callbacks.

    Populates ``self._children`` with one MessageFuture per msg_id
    (reusing the client's live future, or synthesizing an already-resolved
    one from the client's local result cache), then wires aggregate
    futures for sent/result/output completion to this object's own
    resolution callbacks.

    Raises:
        KeyError: if a msg_id has neither a pending future nor a cached
            result in the client.
    """
    if not self._children:
        for msg_id in self.msg_ids:
            # Prefer the client's in-flight future for this message.
            future = self._client._futures.get(msg_id, None)
            if not future:
                # _default is a sentinel distinguishing "no entry" from a
                # legitimately-None cached result.
                result = self._client.results.get(msg_id, _default)
                # result resides in local cache, construct already-resolved Future
                if result is not _default:
                    future = MessageFuture(msg_id)
                    future.output = Future()
                    # NOTE(review): `self.client` here vs `self._client`
                    # above — presumably aliases of the same object; confirm.
                    future.output.metadata = self.client.metadata[msg_id]
                    future.set_result(result)
                    # output already delivered; resolve with no payload
                    future.output.set_result(None)
            if not future:
                raise KeyError("No Future or result for msg_id: %s" % msg_id)
            self._children.append(future)
    # Aggregate futures over all children.
    self._result_future = multi_future(self._children)
    self._sent_future = multi_future([f.tracker for f in self._children])
    self._sent_future.add_done_callback(self._handle_sent)
    # Output is only complete once results AND every child's output are in.
    self._output_future = multi_future([self._result_future] + [f.output for f in self._children])
    # on completion of my constituents, trigger my own resolution
    self._result_future.add_done_callback(self._resolve_result)
    self._output_future.add_done_callback(self._resolve_output)
    self.add_done_callback(self._finalize_result)
def _init_futures(self):
    """Build futures for results and output; hook up callbacks.

    Fills ``self._children`` with one future per msg_id — either the
    client's live future or a pre-resolved MessageFuture built from the
    client's local result cache — then creates aggregate futures whose
    completion drives this object's own resolution.

    Raises:
        KeyError: if a msg_id has neither a pending future nor a cached
            result in the client.
    """
    if not self._children:
        for msg_id in self.msg_ids:
            # Prefer the client's in-flight future for this message.
            future = self._client._futures.get(msg_id, None)
            if not future:
                # _default sentinel distinguishes "missing" from a cached None.
                result = self._client.results.get(msg_id, _default)
                # result resides in local cache, construct already-resolved Future
                if result is not _default:
                    future = MessageFuture(msg_id)
                    future.output = Future()
                    # NOTE(review): `self.client` here vs `self._client`
                    # above — presumably aliases of the same object; confirm.
                    future.output.metadata = self.client.metadata[msg_id]
                    future.set_result(result)
                    # output already delivered; resolve with no payload
                    future.output.set_result(None)
            if not future:
                raise KeyError("No Future or result for msg_id: %s" % msg_id)
            self._children.append(future)
    # Aggregate futures over all children.
    self._result_future = multi_future(self._children)
    self._sent_future = multi_future([f.tracker for f in self._children])
    self._sent_future.add_done_callback(self._handle_sent)
    # Output completes only after results AND every child's output arrive.
    self._output_future = multi_future([self._result_future] + [f.output for f in self._children])
    # on completion of my constituents, trigger my own resolution
    self._result_future.add_done_callback(self._resolve_result)
    self._output_future.add_done_callback(self._resolve_output)
def f():
    # The second delay finishes first, yet multi_future keeps results
    # in submission order.
    pending = [self.delay(3, "v1"), self.delay(1, "v2")]
    responses = yield gen.multi_future(pending)
    self.assertEqual(responses, ["v1", "v2"])
def f():
    # Completion order differs from submission order; the result list
    # still mirrors submission order.
    tasks = [
        gen.Task(self.delay_callback, 3, arg="v1"),
        gen.Task(self.delay_callback, 1, arg="v2"),
    ]
    responses = yield gen.multi_future(tasks)
    self.assertEqual(responses, ["v1", "v2"])
    self.stop()
def test_multi_future_exceptions(self):
    """Exercise exception aggregation and logging in multi_future."""
    # Two failing futures: the first exception propagates, the extras
    # get logged.
    with ExpectLog(app_log, "Multiple exceptions in yield list"):
        with self.assertRaises(RuntimeError) as cm:
            failing = [
                self.async_exception(RuntimeError("error 1")),
                self.async_exception(RuntimeError("error 2")),
            ]
            yield failing
        self.assertEqual(str(cm.exception), "error 1")

    # A single failure beside a success produces no log entry.
    with self.assertRaises(RuntimeError):
        mixed = [
            self.async_exception(RuntimeError("error 1")),
            self.async_future(2),
        ]
        yield mixed

    # quiet_exceptions suppresses the log even with several failures.
    with self.assertRaises(RuntimeError):
        yield gen.multi_future(
            [
                self.async_exception(RuntimeError("error 1")),
                self.async_exception(RuntimeError("error 2")),
            ],
            quiet_exceptions=RuntimeError,
        )
def f():
    # Values resolve at different times but stay bound to their keys.
    pending = {"foo": self.delay(3, "v1"), "bar": self.delay(1, "v2")}
    responses = yield gen.multi_future(pending)
    self.assertEqual(responses, dict(foo="v1", bar="v2"))
def wait_until_finished(self):
    """Block until every pending future in ``self.futures`` resolves.

    Waiting on one batch may enqueue new futures, so keep draining the
    list until it stays empty.
    """
    while self.futures:
        # Take a snapshot and reset so callbacks can enqueue new work.
        batch, self.futures = self.futures, []
        # Default-arg binding pins this iteration's batch for the lambda.
        self.loop.run_sync(lambda batch=batch: multi_future(batch))
def f():
    # Keys map to their results regardless of completion order.
    tasks = {
        "foo": gen.Task(self.delay_callback, 3, arg="v1"),
        "bar": gen.Task(self.delay_callback, 1, arg="v2"),
    }
    responses = yield gen.multi_future(tasks)
    self.assertEqual(responses, dict(foo="v1", bar="v2"))
    self.stop()
def f():
    # The faster second task must not reorder the results.
    slow = gen.Task(self.delay_callback, 3, arg="v1")
    fast = gen.Task(self.delay_callback, 1, arg="v2")
    responses = yield gen.multi_future([slow, fast])
    self.assertEqual(responses, ["v1", "v2"])
    self.stop()
def f():
    # Dict form: completion order varies, key/value pairing does not.
    responses = yield gen.multi_future({
        "foo": gen.Task(self.delay_callback, 3, arg="v1"),
        "bar": gen.Task(self.delay_callback, 1, arg="v2"),
    })
    self.assertEqual(responses, {"foo": "v1", "bar": "v2"})
    self.stop()
def put(self, *args, **kwargs):
    """Write the item to every regional DynamoDB replica in parallel.

    Forwards ``*args``/``**kwargs`` to each region's ``put`` and waits
    for all of them to complete; if any write fails, delegates per-future
    error inspection to ``self._process_multiple_exceptions``.
    """
    # FIX: `except Exception, e:` is Python-2-only syntax (SyntaxError on
    # py3); the bound exception and the enumerate index were both unused.
    futures = [
        getattr(self, 'ddb-%s' % region).put(*args, **kwargs)
        for region in self.regions_list
    ]
    try:
        yield gen.multi_future(futures)
    except Exception:
        # Inspect every future, not just the first failure raised.
        self._process_multiple_exceptions(futures)
def search_movie(self, search):
    """Fan the query out to every movie backend and gather the results."""
    try:
        print('search movie', search)
        # Lazily build the backend list on first use.
        if not self.movie:
            self._get_movie_obj(all=True)
        pending = [backend.search(search) for backend in self.movie]
        result = yield gen.multi_future(pending)
        return result
    except Exception as e:
        # Best-effort: dump the traceback and fall through (returns None).
        traceback.print_exc(file=sys.stdout)
def test_multi_future_exceptions(self):
    """Verify multi_future's propagation and logging of failures."""
    # More than one failure: the first exception wins, the rest are logged.
    with ExpectLog(app_log, "Multiple exceptions in yield list"):
        with self.assertRaises(RuntimeError) as cm:
            yield [
                self.async_exception(RuntimeError("error 1")),
                self.async_exception(RuntimeError("error 2")),
            ]
        self.assertEqual(str(cm.exception), "error 1")
    # Exactly one failure: nothing is logged.
    with self.assertRaises(RuntimeError):
        yield [
            self.async_exception(RuntimeError("error 1")),
            self.async_future(2),
        ]
    # quiet_exceptions silences the multiple-failure log explicitly.
    with self.assertRaises(RuntimeError):
        yield gen.multi_future(
            [
                self.async_exception(RuntimeError("error 1")),
                self.async_exception(RuntimeError("error 2")),
            ],
            quiet_exceptions=RuntimeError,
        )
def test_api_call_queue_raises_exceptions(self):
    """
    Test that the api call queue raises exceptions and proceeds
    to execute other queued api calls.

    Five calls are queued serially; the wall-clock assertion at the end
    checks that failures (including post-throttle failures) do not stall
    the queue.
    """
    @gen.coroutine
    def _call_without_exception():
        # Plain successful call; should take just the 0.05s delay.
        result = yield self.api_call_queue.call(
            self._mock_api_function_sync, delay=0.05)
        self.assertEqual(result, 'OK')
        raise gen.Return('no exception')

    @gen.coroutine
    def _call_with_exception():
        # The queued call must surface exactly the exception we injected.
        err = ValueError('test exception')
        try:
            yield self.api_call_queue.call(self._mock_api_function_sync,
                                           exception=err,
                                           delay=0.05)
        except Exception as e:
            self.assertEqual(err, e)
        raise gen.Return('exception')

    @gen.coroutine
    def _call_with_exception_after_boto2_rate_limit():
        """
        First rate limit, then raise an exception.

        This should take:
            call delay * 2 + min rate limiting delay * 1
        """
        try:
            yield self.api_call_queue.call(
                self._mock_api_function_sync,
                exception=[
                    self.boto2_throttle_exception_1,
                    self.boto2_exception
                ],
                delay=0.05)
        except Exception as e:
            # The non-throttle boto2 exception must be the one raised.
            self.assertEqual(self.boto2_exception, e)
        raise gen.Return('exception')

    @gen.coroutine
    def _call_with_exception_after_boto3_rate_limit():
        """
        First rate limit, then raise an exception.

        This should take:
            call delay * 2 + min rate limiting delay * 1
        """
        try:
            yield self.api_call_queue.call(
                self._mock_api_function_sync,
                exception=[
                    self.boto3_throttle_exception,
                    self.boto3_exception
                ],
                delay=0.05)
        except Exception as e:
            # The non-throttle boto3 exception must be the one raised.
            self.assertEqual(self.boto3_exception, e)
        raise gen.Return('exception')

    call_wrappers = [
        # Should take 0.05s.
        _call_without_exception(),
        # Should take 0.05s.
        _call_with_exception(),
        # Should take 0.05s.
        _call_without_exception(),
        # Should take 0.05s + 0.05s + 0.05s.
        _call_with_exception_after_boto2_rate_limit(),
        # Should take 0.05s + 0.05s + 0.05s.
        _call_with_exception_after_boto3_rate_limit(),
    ]
    start = time.time()
    results = yield gen.multi_future(call_wrappers)
    stop = time.time()
    run_time = stop - start
    # Serial total: 0.05*3 + 0.15*2 = 0.45s; allow ~0.1s of scheduling
    # slack.  NOTE(review): wall-clock bounds like this can be flaky on
    # loaded CI machines.
    self.assertTrue(0.45 <= run_time < 0.55)
    self.assertEqual(results, [
        'no exception',
        'exception',
        'no exception',
        'exception',
        'exception'
    ])
def _searcher(self, string, find_at, query=None, database=None, count=50,
              cache=1800):
    """Split a seo-encoded string into keyword fragments and rank matching
    documents by how many fragments they hit.

    Parameters:
        string: raw search text; seo-encoded into hyphen-separated words.
        find_at: document field name to regex-match each fragment against.
        query: optional extra filter constraints (fresh dict when omitted).
        database: collection to search; defaults to ``self.site.db.post``.
        count: maximum number of document ids returned.
        cache: cache TTL in seconds; values <= 0 disable caching.

    Returns:
        A list of document ids, best match first, or [] when the site id
        cannot be resolved.
    """
    # BUG FIX: the original declared `query={}` (mutable default) and then
    # mutated it below, leaking 'site_id' and the last regex across calls.
    if query is None:
        query = {}
    if not self.site_id:
        self.site_id = self.site.site_db['_id']
    if self.site_id:
        seo_string = function.seo_encode(string)
        # default database
        if not database:
            database = self.site.db.post
        # serve from cache when available
        if cache > 0:
            result = yield self.site.cache.get(source="%s%s" % (seo_string, find_at))
            if result:
                return result
        # Build fragments by repeatedly trimming hyphen-words from the
        # right...
        words = []
        word = seo_string
        while True:
            if word not in words:
                words.append(word)
            if '-' in word:
                word = word.rsplit('-', 1)[0]
            else:
                break
        # ...and then from the left.
        word = seo_string
        while True:
            if word and word not in words:
                words.append(word)
            if '-' in word:
                word = word.split('-', 1)[1]
            else:
                break
        # constrain the search to this site
        if 'site_id' not in query:
            query['site_id'] = self.site_id
        search_dict = {}
        search_future = []
        for word in words:
            word = word.strip()
            if len(word) > 0:
                # NOTE(review): `word` is interpolated unescaped, so regex
                # metacharacters in the input alter the match — consider
                # re.escape (kept as-is to preserve behavior).
                query[find_at] = {
                    '$regex': re.compile(".*" + word + ".*", re.IGNORECASE)
                }
                # BUG FIX: pass a snapshot of the filter — deferred cursors
                # all referenced the same dict, so every queued find could
                # end up using the final word's regex.
                search_future.append(
                    database.find(dict(query), {
                        "_id": 1
                    }).to_list(length=count))
        search_result = yield gen.multi_future(search_future)
        # tally how many fragments each document matched
        for result in search_result:
            if result:
                for mv in result:
                    if mv['_id'] not in search_dict:
                        search_dict[mv['_id']] = 1
                    else:
                        search_dict[mv['_id']] += 1
        # bucket ids by match count, then flatten best-first
        sort_dict = {}
        for k, v in search_dict.items():
            if v not in sort_dict:
                sort_dict[v] = [k]
            else:
                sort_dict[v].append(k)
        result = [x for s in sort_dict.values() for x in s][::-1][:count]
        # cache store
        if cache > 0:
            yield self.site.cache.set(seo_string, result, cache)
        return result
    return []
def _searcher(self, string, find_at, query=None, database=None, count=50,
              cache=1800):
    """Keyword search: seo-encode `string`, derive hyphen-split fragments,
    and rank documents by the number of fragments matched in `find_at`.

    Parameters:
        string: raw search text; seo-encoded into hyphen-separated words.
        find_at: document field name to regex-match each fragment against.
        query: optional extra filter constraints (fresh dict when omitted).
        database: collection to search; defaults to ``self.site.db.post``.
        count: maximum number of document ids returned.
        cache: cache TTL in seconds; values <= 0 disable caching.

    Returns:
        A list of document ids, best match first, or [] when the site id
        cannot be resolved.
    """
    # BUG FIX: the original used a mutable default (`query={}`) and then
    # mutated it, so 'site_id' and the last regex leaked between calls.
    if query is None:
        query = {}
    if not self.site_id:
        self.site_id = self.site.site_db['_id']
    if self.site_id:
        seo_string = function.seo_encode(string)
        # default database
        if not database:
            database = self.site.db.post
        # serve from cache when available
        if cache > 0:
            result = yield self.site.cache.get(source="%s%s" % (seo_string, find_at))
            if result:
                return result
        # Build fragments by trimming hyphen-words from the right...
        words = []
        word = seo_string
        while True:
            if word not in words:
                words.append(word)
            if '-' in word:
                word = word.rsplit('-', 1)[0]
            else:
                break
        # ...and then from the left.
        word = seo_string
        while True:
            if word and word not in words:
                words.append(word)
            if '-' in word:
                word = word.split('-', 1)[1]
            else:
                break
        # constrain the search to this site
        if 'site_id' not in query:
            query['site_id'] = self.site_id
        search_dict = {}
        search_future = []
        for word in words:
            word = word.strip()
            if len(word) > 0:
                # NOTE(review): `word` is interpolated unescaped, so regex
                # metacharacters in the input alter the match — consider
                # re.escape (kept as-is to preserve behavior).
                query[find_at] = {'$regex': re.compile(".*" + word + ".*", re.IGNORECASE)}
                # BUG FIX: snapshot the filter — deferred cursors all shared
                # the same dict, so queued finds could all run with the
                # final word's regex.
                search_future.append(database.find(dict(query), {"_id": 1}).to_list(length=count))
        search_result = yield gen.multi_future(search_future)
        # tally how many fragments each document matched
        for result in search_result:
            if result:
                for mv in result:
                    if mv['_id'] not in search_dict:
                        search_dict[mv['_id']] = 1
                    else:
                        search_dict[mv['_id']] += 1
        # bucket ids by match count, then flatten best-first
        sort_dict = {}
        for k, v in search_dict.items():
            if v not in sort_dict:
                sort_dict[v] = [k]
            else:
                sort_dict[v].append(k)
        result = [x for s in sort_dict.values() for x in s][::-1][:count]
        # cache store
        if cache > 0:
            yield self.site.cache.set(seo_string, result, cache)
        return result
    return []