def test_marker_does_exist(self):
    """Ensure the marker is not saved if it already exists."""
    task_id = "asyncid"
    existing_result = '{"foo": "bar"}'
    FuriousAsyncMarker(id=task_id, result=existing_result, status=1).put()

    store_async_marker(task_id, 1)

    # The pre-existing marker must be untouched by the second store.
    fetched = FuriousAsyncMarker.get_by_id(task_id)
    self.assertEqual(fetched.result, existing_result)
    self.assertEqual(fetched.status, 1)
def test_marker_does_not_exist(self):
    """Ensure the marker is saved if it does not already exist."""
    task_id = "asyncid"

    store_async_marker(task_id, 0)

    # A fresh marker should now be retrievable by the async id.
    self.assertIsNotNone(FuriousAsyncMarker.get_by_id(task_id))
def test_completion_store(self):
    """Ensure the marker is stored on completion even without a result."""
    job = Async('foo')
    job._executed = True

    checker_outcome = context_completion_checker(job)

    self.assertTrue(checker_outcome)

    # Even with no result payload, a marker keyed by the async id must
    # exist with the sentinel -1 status.
    stored = FuriousAsyncMarker.get_by_id(job.id)
    self.assertIsNotNone(stored)
    self.assertEqual(stored.key.id(), job.id)
    self.assertEqual(stored.status, -1)
def setUp(self):
    """Initialize an App Engine testbed with datastore and memcache stubs.

    The datastore stub is configured with a pseudo-random HRD consistency
    policy (probability HRD_POLICY_PROBABILITY) so tests exercise
    eventually-consistent read behavior rather than always-consistent reads.
    """
    super(NdbTestBase, self).setUp()
    # Pin the timezone so any datetime-sensitive assertions are stable.
    os.environ['TZ'] = "UTC"
    self.testbed = testbed.Testbed()
    # activate() must precede setup_env and the stub initializers.
    self.testbed.activate()
    self.testbed.setup_env(app_id="furious")
    self.policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(
        probability=HRD_POLICY_PROBABILITY)
    self.testbed.init_datastore_v3_stub(consistency_policy=self.policy)
    self.testbed.init_memcache_stub()

    # TODO: Kill this — sanity check that the stubbed datastore starts
    # with no FuriousAsyncMarker entities.
    marker = FuriousAsyncMarker.query().fetch(1)
    self.assertEqual(marker, [])
def test_results_with_tasks_loaded_missing_result(self, get_multi_async):
    """Ensure results uses the cached tasks and yields them out when tasks
    are cached and there's no results.
    """
    cached_marker = FuriousAsyncMarker()
    ctx = Context(_task_ids=["1", "2", "3"])
    ctx_result = ContextResult(ctx)
    # Pre-populate the task cache so items() never needs to hit the
    # datastore; the cached marker has no result payload.
    ctx_result._task_cache = {"1": cached_marker, "2": None, "3": None}

    pairs = sorted(ctx_result.items())

    self.assertEqual(pairs, [("1", None), ("2", None), ("3", None)])
    # The cache satisfied everything, so no async fetch was issued.
    self.assertFalse(get_multi_async.called)
class StoreAsyncMarkerTestCase(NdbTestBase): def test_marker_does_not_exist(self): """Ensure the marker is saved if it does not already exist.""" async_id = "asyncid" store_async_marker(async_id, 0) self.assertIsNotNone(FuriousAsyncMarker.get_by_id(async_id)) def test_marker_does_exist(self): """Ensure the marker is not saved if it already exists.""" async_id = "asyncid" result = '{"foo": "bar"}' FuriousAsyncMarker(id=async_id, result=result, status=1).put() store_async_marker(async_id, 1) marker = FuriousAsyncMarker.get_by_id(async_id) self.assertEqual(marker.result, result) self.assertEqual(marker.status, 1) def test_store_async_exception(self): """Ensure an async exception is encoded correctly.""" async_id = "asyncid" async_result = AsyncResult() try: raise Exception() except Exception, e: async_result.payload = encode_exception(e) async_result.status = async_result.ERROR store_async_result(async_id, async_result) marker = FuriousAsyncMarker.get_by_id(async_id) self.assertEqual(marker.result, json.dumps(async_result.to_dict())) self.assertEqual(marker.status, async_result.ERROR)
def _build_marker(payload=None, status=None):
    """Build an (unsaved) marker whose result encodes payload and status."""
    encoded = json.dumps({'payload': payload, 'status': status})
    return FuriousAsyncMarker(result=encoded)
class IterResultsTestCase(NdbTestBase):
    """Tests for iter_context_results / ContextResult result iteration.

    NOTE(review): every test takes a `get_multi_async` mock, presumably
    injected by a class- or method-level patch decorator not visible in
    this chunk — confirm against the full file.
    """

    def test_more_results_than_batch_size(self, get_multi_async):
        """Ensure all the results are yielded out when more than the batch
        size.
        """
        marker1 = _build_marker(payload="1", status=1)
        marker2 = _build_marker(payload="2", status=1)
        marker3 = _build_marker(payload="3", status=1)
        # batch_size=2 forces two fetches: two futures first, then one.
        future_set_1 = [_build_future(marker1), _build_future(marker2)]
        future_set_2 = [_build_future(marker3)]
        get_multi_async.side_effect = future_set_1, future_set_2
        context = Context(_task_ids=["1", "2", "3"])
        results = list(iter_context_results(context, batch_size=2))
        self.assertEqual(results[0], ("1", marker1))
        self.assertEqual(results[1], ("2", marker2))
        self.assertEqual(results[2], ("3", marker3))

    def test_less_results_than_batch_size(self, get_multi_async):
        """Ensure all the results are yielded out when less than the batch
        size.
        """
        marker1 = _build_marker(payload="1", status=1)
        marker2 = _build_marker(payload="2", status=1)
        marker3 = _build_marker(payload="3", status=1)
        # All three futures are returned by a single get_multi_async call.
        future_set_1 = [
            _build_future(marker1),
            _build_future(marker2),
            _build_future(marker3)
        ]
        get_multi_async.return_value = future_set_1
        context = Context(_task_ids=["1", "2", "3"])
        results = list(iter_context_results(context))
        self.assertEqual(results[0], ("1", marker1))
        self.assertEqual(results[1], ("2", marker2))
        self.assertEqual(results[2], ("3", marker3))

    def test_no_task_ids(self, get_multi_async):
        """Ensure no results are yielded out when there are no task ids
        on the passed in context.
        """
        get_multi_async.return_value = []
        context = Context(_task_ids=[])
        results = list(iter_context_results(context))
        self.assertEqual(results, [])

    def test_keys_with_no_results(self, get_multi_async):
        """Ensure empty results are yielded out when there are no items
        to load but task ids are on the passed in context.
        """
        # Futures resolving to no entity: each task id pairs with None.
        future_set_1 = [_build_future(), _build_future(), _build_future()]
        get_multi_async.return_value = future_set_1
        context = Context(_task_ids=["1", "2", "3"])
        results = list(iter_context_results(context))
        self.assertEqual(results[0], ("1", None))
        self.assertEqual(results[1], ("2", None))
        self.assertEqual(results[2], ("3", None))

    def test_failure_in_marker(self, get_multi_async):
        """Ensure all the results are yielded out when less than the batch
        size and a failure is included in the results.
        """
        async_id = "1"
        async_result = AsyncResult()
        # Raise for real so encode_exception captures traceback state.
        try:
            raise Exception()
        except Exception, e:
            async_result.payload = encode_exception(e)
            async_result.status = async_result.ERROR
        json_dump = json.dumps(async_result.to_dict())
        marker1 = FuriousAsyncMarker(
            id=async_id, result=json_dump, status=async_result.status)
        marker2 = FuriousAsyncMarker(
            result=json.dumps(AsyncResult(payload="2", status=1).to_dict()))
        marker3 = FuriousAsyncMarker(
            result=json.dumps(AsyncResult(payload="3", status=1).to_dict()))
        future_set_1 = [
            _build_future(marker1),
            _build_future(marker2),
            _build_future(marker3)
        ]
        get_multi_async.return_value = future_set_1
        context = Context(_task_ids=["1", "2", "3"])
        context_result = ContextResult(context)
        # items() unwraps each marker's payload; the failure marker yields
        # its encoded-exception payload rather than a plain value.
        results = list(context_result.items())
        self.assertEqual(results[0], ("1", json.loads(json_dump)["payload"]))
        self.assertEqual(results[1], ("2", "2"))
        self.assertEqual(results[2], ("3", "3"))