def test_store_async_exception(self):
     """Ensure an async exception is encoded correctly."""
     async_id = "asyncid"
     async_result = AsyncResult()
     try:
         raise Exception()
     except Exception as e:
         async_result.payload = encode_exception(e)
         async_result.status = async_result.ERROR
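
The snippet above never shows what encode_exception() returns; as a rough, self-contained sketch of the general pattern (capturing the exception type, message and traceback in a serialisable dict), something like the following could be used. The helper name and dict keys here are illustrative assumptions, not furious's actual format.

import sys
import traceback

def encode_exception_sketch(exc):
    # Illustrative only: capture the active exception's type, message and
    # traceback in a JSON-friendly dict. The real encode_exception() in
    # furious.processors may use a different structure.
    exc_type, exc_value, exc_tb = sys.exc_info()
    return {
        'type': exc.__class__.__name__,
        'message': str(exc),
        'traceback': traceback.format_exception(exc_type, exc_value, exc_tb),
    }

try:
    raise Exception('boom')
except Exception as e:
    payload = encode_exception_sketch(e)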
 def test_failure_in_marker(self, get_multi_async):
     """Ensure all the results are yielded out when less than the batch
     size and a failure is included in the results.
     """
     async_id = "1"
     async_result = AsyncResult()
     try:
         raise Exception()
     except Exception as e:
         async_result.payload = encode_exception(e)
         async_result.status = async_result.ERROR
Example #5
def run_job():
    """Takes an async object and executes its job."""
    async = get_current_async()
    async_options = async.get_options()

    job = async_options.get('job')
    if not job:
        raise Exception('This async contains no job to execute!')

    __, args, kwargs = job

    if args is None:
        args = ()

    if kwargs is None:
        kwargs = {}

    function = async._decorate_job()

    try:
        async.executing = True
        async.result = AsyncResult(payload=function(*args, **kwargs),
                                   status=AsyncResult.SUCCESS)
    except Abort as abort:
        logging.info('Async job was aborted: %r', abort)
        async.result = AsyncResult(status=AsyncResult.ABORT)

        # QUESTION: In this eventuality, we should probably tell the context we
        # are "complete" and let it handle completion checking.
        _handle_context_completion_check(async)
        return
    except AbortAndRestart as restart:
        logging.info('Async job was aborted and restarted: %r', restart)
        raise
    except BaseException as e:
        async.result = AsyncResult(payload=encode_exception(e),
                                   status=AsyncResult.ERROR)

    _handle_results(async_options)
    _handle_context_completion_check(async)
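
run_job() above pulls a 'job' entry out of the async's options and unpacks it as (target, args, kwargs), defaulting missing args/kwargs. As a hypothetical illustration of that shape (the target path and values here are made up, not furious APIs), the unpacking and None-guarding look like this:

# Hypothetical options dict; run_job() only cares that 'job' unpacks into
# three values: a target reference, positional args, and keyword args.
async_options = {'job': ('example.tasks.process', (1, 2), None)}

_, args, kwargs = async_options['job']
args = args if args is not None else ()
kwargs = kwargs if kwargs is not None else {}

assert args == (1, 2) and kwargs == {}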
Example #6
    def test_is_error_with_no_callback(self, get_current_async):
        """Ensure an error process with no callback raises the error."""
        from furious.async import Async
        from furious.async import AsyncResult
        from furious.processors import encode_exception
        from furious.processors import _process_results

        async = Mock(spec=Async)

        try:
            raise Exception()
        except Exception as e:
            async.result = AsyncResult(payload=encode_exception(e),
                                       status=AsyncResult.ERROR)
Example #7
    def test_completion_store_with_result(self):
        """Ensure the marker is stored on completion with a result."""

        async = Async('foo')
        async._executing = True
        async.result = AsyncResult(status=1)
        async._executed = True

        result = context_completion_checker(async)

        self.assertTrue(result)

        marker = FuriousAsyncMarker.get_by_id(async.id)

        self.assertIsNotNone(marker)
        self.assertEqual(marker.key.id(), async.id)
        self.assertEqual(marker.status, 1)
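
The test above only asserts on the marker's status. Assuming the marker's result field holds the JSON dump of AsyncResult.to_dict(), as the iter-results examples below do, reading a stored payload back out might look roughly like this (illustrative, not part of the original test):

import json

marker = FuriousAsyncMarker.get_by_id(async.id)
if marker is not None and marker.result:
    stored = json.loads(marker.result)
    payload = stored.get('payload')  # whatever the job returned, or an encoded error
    status = stored.get('status')    # assumed to mirror the AsyncResult status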
Example #8
class IterResultsTestCase(NdbTestBase):
    def test_more_results_than_batch_size(self, get_multi_async):
        """Ensure all the results are yielded out when more than the batch
        size.
        """
        marker1 = _build_marker(payload="1", status=1)
        marker2 = _build_marker(payload="2", status=1)
        marker3 = _build_marker(payload="3", status=1)

        future_set_1 = [_build_future(marker1), _build_future(marker2)]
        future_set_2 = [_build_future(marker3)]

        get_multi_async.side_effect = future_set_1, future_set_2

        context = Context(_task_ids=["1", "2", "3"])

        results = list(iter_context_results(context, batch_size=2))

        self.assertEqual(results[0], ("1", marker1))
        self.assertEqual(results[1], ("2", marker2))
        self.assertEqual(results[2], ("3", marker3))

    def test_less_results_than_batch_size(self, get_multi_async):
        """Ensure all the results are yielded out when less than the batch
        size.
        """
        marker1 = _build_marker(payload="1", status=1)
        marker2 = _build_marker(payload="2", status=1)
        marker3 = _build_marker(payload="3", status=1)

        future_set_1 = [
            _build_future(marker1),
            _build_future(marker2),
            _build_future(marker3)
        ]

        get_multi_async.return_value = future_set_1

        context = Context(_task_ids=["1", "2", "3"])

        results = list(iter_context_results(context))

        self.assertEqual(results[0], ("1", marker1))
        self.assertEqual(results[1], ("2", marker2))
        self.assertEqual(results[2], ("3", marker3))

    def test_no_task_ids(self, get_multi_async):
        """Ensure no results are yielded out when there are no task ids on the
        passed in context.
        """
        get_multi_async.return_value = []
        context = Context(_task_ids=[])

        results = list(iter_context_results(context))

        self.assertEqual(results, [])

    def test_keys_with_no_results(self, get_multi_async):
        """Ensure empty results are yielded out when there are no items to
        load but task ids are on the passed in context.
        """
        future_set_1 = [_build_future(), _build_future(), _build_future()]

        get_multi_async.return_value = future_set_1

        context = Context(_task_ids=["1", "2", "3"])

        results = list(iter_context_results(context))

        self.assertEqual(results[0], ("1", None))
        self.assertEqual(results[1], ("2", None))
        self.assertEqual(results[2], ("3", None))

    def test_failure_in_marker(self, get_multi_async):
        """Ensure all the results are yielded out when less than the batch
        size and a failure is included in the results.
        """
        async_id = "1"
        async_result = AsyncResult()
        try:
            raise Exception()
        except Exception as e:
            async_result.payload = encode_exception(e)
            async_result.status = async_result.ERROR

        json_dump = json.dumps(async_result.to_dict())
        marker1 = FuriousAsyncMarker(id=async_id,
                                     result=json_dump,
                                     status=async_result.status)

        marker2 = FuriousAsyncMarker(
            result=json.dumps(AsyncResult(payload="2", status=1).to_dict()))
        marker3 = FuriousAsyncMarker(
            result=json.dumps(AsyncResult(payload="3", status=1).to_dict()))

        future_set_1 = [
            _build_future(marker1),
            _build_future(marker2),
            _build_future(marker3)
        ]

        get_multi_async.return_value = future_set_1

        context = Context(_task_ids=["1", "2", "3"])
        context_result = ContextResult(context)

        results = list(context_result.items())

        self.assertEqual(results[0], ("1", json.loads(json_dump)["payload"]))
        self.assertEqual(results[1], ("2", "2"))
        self.assertEqual(results[2], ("3", "3"))
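
Taken together, these tests pin down the batching contract of iter_context_results: task ids are fetched in batch_size chunks via get_multi_async and yielded back as (task_id, marker) pairs, with None standing in for missing markers. A minimal, self-contained sketch of that behaviour (not the furious implementation; load_batch is a stand-in for the ndb lookup) could look like:

def iter_results_sketch(task_ids, load_batch, batch_size=10):
    """Yield (task_id, marker_or_None) pairs, loading markers in batches."""
    for start in range(0, len(task_ids), batch_size):
        batch = task_ids[start:start + batch_size]
        markers = load_batch(batch)  # stand-in for get_multi_async + get_result
        for task_id, marker in zip(batch, markers):
            yield task_id, marker


# Usage with a fake loader that has no stored marker for task "2".
fake_store = {"1": "marker-1", "3": "marker-3"}
results = list(iter_results_sketch(
    ["1", "2", "3"],
    lambda ids: [fake_store.get(i) for i in ids],
    batch_size=2))

assert results == [("1", "marker-1"), ("2", None), ("3", "marker-3")]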