Example #1
    def test_individual_serialization(self):
        """
        Make sure a marker with children as IDs
        which is the state they would be in after loading
        from the persistence layer, the children maintain
        through serialization
        """
        from furious.marker_tree.identity_utils import leaf_persistence_id_from_group_id
        from furious.marker_tree.marker import Marker
        from furious.job_utils import encode_callbacks
        from furious.tests.marker_tree import dummy_success_callback

        marker = Marker.from_dict({"id": "test", "callbacks": encode_callbacks({"success": dummy_success_callback})})
        self.assertEqual(marker.id, "test")
        marker2 = Marker.from_dict(marker.to_dict())
        self.assertEqual(marker2.to_dict(), marker.to_dict())

        root_marker = Marker(id="fun")
        children = []
        for x in xrange(10):
            children.append(Marker(id=leaf_persistence_id_from_group_id(root_marker.id, x)))

        root_marker.children = [marker.id for marker in children]

        root_dict = root_marker.to_dict()

        self.assertTrue("children" in root_dict.keys())
        self.assertEqual(len(children), len(root_dict["children"]))

        for index, child_id in enumerate(root_dict["children"]):
            self.assertEqual(children[index].id, child_id)

        reconstituted_root = Marker.from_dict(root_dict)

        self.assertEqual(len(reconstituted_root.children), len(reconstituted_root.children_to_dict()))
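The test above passes a real callable (dummy_success_callback) into encode_callbacks before building the Marker. A minimal sketch of the general technique involved, i.e. replacing callables with importable dotted-path strings so the dict survives JSON encoding; this is an illustration written for this page, not furious's actual implementation:

def encode_callbacks_sketch(callbacks):
    # Return None for an empty mapping, mirroring the behaviour checked
    # in the last example on this page.
    if not callbacks:
        return None
    encoded = {}
    for event, callback in callbacks.items():
        if callable(callback):
            # Store the callable as "module.name" so it can be re-imported later.
            encoded[event] = "%s.%s" % (callback.__module__, callback.__name__)
        else:
            # Already a dotted-path string; keep it as-is.
            encoded[event] = callback
    return encoded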
Example #2
    def test_success_and_combiner_called(self, queue_add_mock,
                                         success_mock,
                                         combiner_mock):
        """Ensure the context success callback and the internal vertex
        combiner are called when all of the context's tasks are processed.
        """
        from furious.context import Context
        from furious.context._execution import _ExecutionContext
        from furious.processors import run_job
        from furious.job_utils import encode_callbacks

        with Context(callbacks=encode_callbacks(
                {'internal_vertex_combiner':
                     'furious.extras.combiners.lines_combiner',
                    'success':
                    'furious.extras.callbacks.small_aggregated'
                    '_results_success_callback'})) as ctx:
            job = ctx.add(pass_args_function, args=[1, 2])

        with _ExecutionContext(job):
            run_job()

        queue_add_mock.assert_called_once()
        combiner_mock.assert_called_once()
        success_mock.assert_called_once()
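The three mock arguments in the test signature imply that the original test is wrapped in mock.patch decorators that this listing does not show. A hedged sketch of what such decorators could look like; the class name and patch targets are assumptions for illustration, not the paths actually used by the furious test suite:

import mock

class TestContextCompletion(object):
    # Stacked patch decorators apply bottom-up: the bottom-most patch is
    # passed as the first mock argument after self.
    @mock.patch('furious.extras.combiners.lines_combiner')
    @mock.patch('furious.extras.callbacks.'
                'small_aggregated_results_success_callback')
    @mock.patch('google.appengine.api.taskqueue.Queue.add')
    def test_success_and_combiner_called(self, queue_add_mock, success_mock,
                                         combiner_mock):
        pass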
Example #3
    def to_dict(self):
        import copy
        #        logger.debug("to dict %s"%self.id)
        options = copy.deepcopy(self._options)

        callbacks = self._options.get('callbacks')
        if callbacks:
            options['callbacks'] = encode_callbacks(callbacks)

        options['children'] = self.children_to_dict()
        options['work_time'] = self.work_time

        return options
Example #4
def encode_async_options(async):
    """Encode Async options for JSON encoding."""
    options = copy.deepcopy(async._options)

    options['_type'] = reference_to_path(async.__class__)

    # JSON doesn't like datetimes.
    eta = options.get('task_args', {}).get('eta')
    if eta:
        import time

        options['task_args']['eta'] = time.mktime(eta.timetuple())

    callbacks = async._options.get('callbacks')
    if callbacks:
        options['callbacks'] = encode_callbacks(callbacks)

    return options
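The eta handling above exists because datetime objects are not JSON serializable; converting to a POSIX timestamp sidesteps that. A small self-contained illustration of the same conversion (the values are made up):

import json
import time
from datetime import datetime

eta = datetime(2030, 1, 1, 12, 0, 0)
# json.dumps({'eta': eta}) would raise a TypeError: datetime is not serializable.
timestamp = time.mktime(eta.timetuple())
print(json.dumps({'task_args': {'eta': timestamp}}))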
Example #6
    def to_dict(self):
        """Return this Context as a dict suitable for json encoding."""
        import copy

        options = copy.deepcopy(self._options)

        if self._insert_tasks:
            options['insert_tasks'] = reference_to_path(self._insert_tasks)

        if self._persistence_engine:
            options['persistence_engine'] = reference_to_path(
                self._persistence_engine)

        options.update({
            '_tasks_inserted': self._tasks_inserted,
        })

        callbacks = self._options.get('callbacks')
        if callbacks:
            options['callbacks'] = encode_callbacks(callbacks)

        return options
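reference_to_path is used above to turn the insert_tasks and persistence_engine callables into strings that can live in a JSON document. A minimal sketch of that general technique and its inverse, assuming the usual "module.attribute" convention; this is not necessarily furious's exact implementation:

import importlib

def reference_to_path_sketch(reference):
    # Build an importable "module.name" path for a function or class.
    return "%s.%s" % (reference.__module__, reference.__name__)

def path_to_reference_sketch(path):
    # Re-import the object named by a dotted path.
    module_path, _, name = path.rpartition('.')
    return getattr(importlib.import_module(module_path), name)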
Example #8
def encode_async_options(async):
    """Encode Async options for JSON encoding."""
    options = copy.deepcopy(async._options)

    options['_type'] = reference_to_path(async.__class__)

    # JSON doesn't like datetimes.
    eta = options.get('task_args', {}).get('eta')
    if eta:
        options['task_args']['eta'] = time.mktime(eta.timetuple())

    callbacks = async._options.get('callbacks')
    if callbacks:
        options['callbacks'] = encode_callbacks(callbacks)

    if '_context_checker' in options:
        _checker = options.pop('_context_checker')
        options['__context_checker'] = reference_to_path(_checker)

    if '_process_results' in options:
        _processor = options.pop('_process_results')
        options['__process_results'] = reference_to_path(_processor)

    return options
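After these substitutions every value left in options is a plain string, number, dict, or list, so the result can be handed straight to json.dumps. A quick hypothetical check; the keys and values below are illustrative only, not output captured from furious:

import json

options = {
    '_type': 'furious.async.Async',                      # illustrative value
    'task_args': {'eta': 1893456000.0},                  # already a timestamp
    'callbacks': {'success': 'furious.extras.callbacks'
                             '.small_aggregated_results_success_callback'},
}
print(json.dumps(options))  # succeeds once every value is a plain type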
Example #9
        if self._insert_tasks:
            options['insert_tasks'] = reference_to_path(self._insert_tasks)

        if self._persistence_engine:
            options['persistence_engine'] = reference_to_path(
                self._persistence_engine)

        options.update({
            '_tasks_inserted': self._tasks_inserted,
            '_task_ids': [async.id for async in self._tasks]
        })

        callbacks = self._options.get('callbacks')
        if callbacks:
            options['callbacks'] = encode_callbacks(callbacks)

        return options

    @classmethod
    def from_dict(cls, context_options_dict):
        """Return a context job from a dict output by Context.to_dict."""
        import copy

        context_options = copy.deepcopy(context_options_dict)

        tasks_inserted = context_options.pop('_tasks_inserted', False)
        task_ids = context_options.pop('_task_ids', [])

        insert_tasks = context_options.pop('insert_tasks', None)
        if insert_tasks:
Example #10
    def test_encode_no_callbacks(self):
        """Ensure encode_callbacks returns None when
        called with no callbacks.
        """
        from furious.job_utils import encode_callbacks
        self.assertIsNone(encode_callbacks(None))