Example 1
 def test_link_on_scalar(self):
     x = Signature('TASK', link=Signature('B'))
     self.assertTrue(x.options['link'])
     x.link(Signature('C'))
     self.assertIsInstance(x.options['link'], list)
     self.assertIn(Signature('B'), x.options['link'])
     self.assertIn(Signature('C'), x.options['link'])
Example 2
 def test_link_on_scalar(self):
     x = Signature('TASK', link=Signature('B'))
     assert x.options['link']
     x.link(Signature('C'))
     assert isinstance(x.options['link'], list)
     assert Signature('B') in x.options['link']
     assert Signature('C') in x.options['link']
Example 3
 def test_tpc_abort_chain(t, _transaction, _stage_record):
     sig1 = Signature("mock.task", args=(4,), options={"task_id": "1"})
     sig2 = Signature("mock.task", args=(5,), options={"task_id": "2"})
     sig = chain(sig1, sig2)
     tx = Mock()
     t.dispatcher.add(sig)
     t.dispatcher.abort(tx)
     t.assertTupleEqual((), t.dispatcher.staged)
     t.storage.mdelete.assert_called_once_with("1", "2")
     t.assertListEqual([], t.dispatcher._signatures)
Example 4
 def test_add_chain(t, _transaction, _stage_record):
     sig1 = Signature("mock.task", args=(4,), options={"task_id": "1"})
     sig2 = Signature("mock.task", args=(5,), options={"task_id": "2"})
     sig = chain(sig1, sig2)
     t.dispatcher.add(sig)
     expected = (sig1.id, sig2.id)
     t.assertTupleEqual(expected, t.dispatcher.staged)
     tx = _transaction.get.return_value
     tx.join.assert_called_once_with(t.dispatcher)
     calls = [call(t.storage, sig1), call(t.storage, sig2)]
     _stage_record.assert_has_calls(calls)
Example 5
 def test_tpc_finish_chain(t, _transaction, _stage, _commit):
     sig1 = Signature("mock.task", args=(4,), options={"task_id": "1"})
     sig2 = Signature("mock.task", args=(5,), options={"task_id": "2"})
     sig = chain(sig1, sig2)
     apply_mock = Mock()
     sig.apply_async = apply_mock
     tx = Mock()
     t.dispatcher.add(sig)
     t.dispatcher.tpc_finish(tx)
     t.assertTupleEqual((), t.dispatcher.staged)
     t.assertListEqual([], t.dispatcher._signatures)
     t.storage.mdelete.assert_not_called()
     apply_mock.assert_called_once_with()
     calls = [call(t.storage, sig1), call(t.storage, sig2)]
     _commit.assert_has_calls(calls)
Example 6
 def test_add(t, _transaction, _stage_record):
     sig = Signature("mock.task", args=(10,), options={"task_id": "1"})
     t.dispatcher.add(sig)
     t.assertTupleEqual((sig.id,), t.dispatcher.staged)
     tx = _transaction.get.return_value
     tx.join.assert_called_once_with(t.dispatcher)
     _stage_record.assert_called_once_with(t.storage, sig)
Example 7
 def test_replace(self):
     x = Signature("TASK", ("A"), {})
     self.assertTupleEqual(x.replace(args=("B", )).args, ("B", ))
     self.assertDictEqual(x.replace(kwargs={"FOO": "BAR"}).kwargs,
             {"FOO": "BAR"})
     self.assertDictEqual(x.replace(options={"task_id": "123"}).options,
             {"task_id": "123"})
Example 8
def _add_jobs_for_group(nodes_to_update, dependencies, grp, groups, existing_cache_jobs) -> Set:
    cache_jobs = set()
    after_jobs = []
    jobs = []

    for node in grp:
        if node.pk in nodes_to_update:
            update_job = node.get_update_task()

            # Cache jobs are handled separately and kept in a set to remove duplicates,
            # e.g. when two or more Venn nodes share the same parents.
            task_args_objs_set = node.get_cache_task_args_objs_set()
            new_cache_jobs = task_args_objs_set - existing_cache_jobs
            if new_cache_jobs:
                cache_jobs.update(new_cache_jobs)
                after_jobs.append(update_job)
            else:
                jobs.append(update_job)
        elif node.pk in dependencies:
            # Nodes may already be loading from another update; keep the dependency
            # on the existing task and wait for the loading parent tasks.
            if NodeStatus.is_loading(node.status):
                jobs.append(wait_for_node.si(node.pk))  # @UndefinedVariable

    if cache_jobs:
        for task, args, _ in cache_jobs:
            if task:
                jobs.append(Signature(task, args=args, immutable=True))

    _add_jobs_to_groups(jobs, groups)
    _add_jobs_to_groups(after_jobs, groups)
    return cache_jobs
Example 9
 def test_set(self):
     self.assertDictEqual(
         Signature('TASK', x=1).set(task_id='2').options,
         {
             'x': 1,
             'task_id': '2'
         },
     )
Example 10
 def test_replace(self):
     x = Signature('TASK', ('A'), {})
     assert x.replace(args=('B',)).args == ('B',)
     assert x.replace(kwargs={'FOO': 'BAR'}).kwargs == {
         'FOO': 'BAR',
     }
     assert x.replace(options={'task_id': '123'}).options == {
         'task_id': '123',
     }
Example 11
 def test_replace(self):
     x = Signature('TASK', ('A'), {})
     self.assertTupleEqual(x.replace(args=('B', )).args, ('B', ))
     self.assertDictEqual(
         x.replace(kwargs={'FOO': 'BAR'}).kwargs,
         {'FOO': 'BAR'},
     )
     self.assertDictEqual(
         x.replace(options={'task_id': '123'}).options,
         {'task_id': '123'},
     )
Example 12
 def __call__(self, *args: Any, **kwargs: Any) -> str:
     # Run the wrapped function locally, then dispatch the same call as a
     # task via a Signature and return the task id.
     self.func(*args, **kwargs)
     s = Signature(
         args=args,
         kwargs=kwargs,
         task=self.name,
         app=self.app,
         task_type=self.app.Task,
         routing_key=self.name)
     result: AsyncResult = s.apply_async()
     return result.id
Example 13
 def chord_context(self, size=1):
     with patch('celery.backends.redis.maybe_signature') as ms:
         tasks = [self.create_task() for i in range(size)]
         request = Mock(name='request')
         request.id = 'id1'
         request.group = 'gid1'
         callback = ms.return_value = Signature('add')
         callback.id = 'id1'
         callback['chord_size'] = size
         callback.delay = Mock(name='callback.delay')
         yield tasks, request, callback
Example 14
 def test_tpc_finish(t, _transaction, _stage, _commit):
     sig = Signature("mock.task", args=(10,), options={"task_id": "1"})
     apply_mock = Mock()
     sig.apply_async = apply_mock
     tx = Mock()
     t.dispatcher.add(sig)
     t.dispatcher.tpc_finish(tx)
     t.assertTupleEqual((), t.dispatcher.staged)
     t.assertListEqual([], t.dispatcher._signatures)
     t.storage.mdelete.assert_not_called()
     apply_mock.assert_called_once_with()
     _commit.assert_called_once_with(t.storage, sig)
Example 15
 def chord_context(self, size=1):
     with patch('celery.backends.redis.maybe_signature') as ms:
         request = Mock(name='request')
         request.id = 'id1'
         group_id = 'gid1'
         request.group = group_id
         request.group_index = None
         tasks = [
             self.create_task(i, group_id=request.group)
             for i in range(size)
         ]
         callback = ms.return_value = Signature('add')
         callback.id = 'id1'
         self.b.set_chord_size(group_id, size)
         callback.delay = Mock(name='callback.delay')
         yield tasks, request, callback
Example 16
    async def pdf_declaration_pipeline(self, exception):
        if not exception:
            if self.pdf_declaration_url_lst:
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Downloading pdf:declaration bodies, total {len(self.pdf_declaration_url_lst)}'
                )
                tasks = [
                    self.get_pdf_bin(u) for u in self.pdf_declaration_url_lst
                ]

                # Download in parallel or serially, depending on pdf_concurrence
                if self.pdf_concurrence:
                    await asyncio.wait(tasks)
                else:
                    for f in tasks:
                        await f

            if self.pdf_declaration_bin_lst:

                tasks = [
                    Signature(current_app.config.PDF_TASK, [
                        pdbs.body,
                        pdbs.declarationMeta._asdict(), {
                            'province': self.task_params['province'],
                            'company_name': self.task_params['company_name']
                        }
                    ],
                              queue=current_app.config.PDF_QUEUE,
                              app=current_app.app)
                    for pdbs in self.pdf_declaration_bin_lst
                ]
                job = group(*tasks)
                group_result = job.apply_async()
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Submitted pdf:declaration tasks, total {len(self.pdf_declaration_bin_lst)} task_id'
                )
                while not group_result.ready():
                    await asyncio.sleep(.1)
                success_results = [
                    async_result.info for async_result in group_result.results
                    if async_result.state == 'SUCCESS'
                ]
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Got pdf:declaration task results, {len(success_results)} successful tasks')
                for result in success_results:
                    if result and (not result.get('code')):
                        declaration = json_data_mapper(result, DeclarationInfo)
                        self.declaration.declarationInfo.append(declaration)
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Declaration objects actually obtained: {len(self.declaration.declarationInfo)}'
                )
Example 17
def update_analysis(analysis_id):
    """ Launches async job to update analysis """
    task = Signature("analysis.tasks.analysis_update_tasks.create_and_launch_analysis_tasks", args=(analysis_id,))
    task.apply_async()
Example 18
 def test_call(self):
     x = Signature('foo', (1, 2), {'arg1': 33}, app=self.app)
     x.type = Mock(name='type')
     x(3, 4, arg2=66)
     x.type.assert_called_with(3, 4, 1, 2, arg1=33, arg2=66)
Example 19
 def test_reduce(self):
     x = Signature('TASK', (2, 4), app=self.app)
     fun, args = x.__reduce__()
     self.assertEqual(fun(*args), x)
Example 20
 def test_json(self):
     x = Signature('TASK', link=Signature('B', app=self.app), app=self.app)
     self.assertDictEqual(x.__json__(), dict(x))
Example 21
from celery.canvas import Signature, maybe_unroll_group
from celery.result import EagerResult

from celery.tests.case import (
    AppCase,
    ContextMock,
    MagicMock,
    Mock,
    depends_on_current_app,
)

SIG = Signature({
    'task': 'TASK',
    'args': ('A1', ),
    'kwargs': {
        'K1': 'V1'
    },
    'options': {
        'task_id': 'TASK_ID'
    },
    'subtask_type': ''
})


class test_maybe_unroll_group(AppCase):
    def test_when_no_len_and_no_length_hint(self):
        g = MagicMock(name='group')
        g.tasks.__len__.side_effect = TypeError()
        g.tasks.__length_hint__ = Mock()
        g.tasks.__length_hint__.return_value = 0
        self.assertIs(maybe_unroll_group(g), g)
        g.tasks.__length_hint__.side_effect = AttributeError()
Example 22
 def test_set(self):
     self.assertDictEqual(Signature("TASK", x=1).set(task_id="2").options,
             {"x": 1, "task_id": "2"})
Example 23
def is_ready(result):
    return result.ready()


if __name__ == "__main__":
    with concurrent.futures.ThreadPoolExecutor() as executor:
        get_mem_usage()
        for _ in range(10):
            for _ in range(10):
                args = ({
                    'foo': gen_big_str(),
                    'bar': [{
                        'bla': gen_big_str()
                    }]
                }, )
                if LEAK:
                    future = executor.submit(app.send_task,
                                             'hello',
                                             args=args,
                                             chain=[Signature('hello')])
                    result = future.result()
                    while not executor.submit(is_ready, result).result():
                        time.sleep(0.001)
                else:
                    result = app.send_task('hello',
                                           args=args,
                                           chain=[Signature('hello')])
                assert result.get()
            gc.collect()
            get_mem_usage()
Example 24
from __future__ import absolute_import
from __future__ import with_statement

from mock import Mock

from celery import current_app, task
from celery.canvas import Signature, chain, group, chord, subtask
from celery.result import EagerResult

from celery.tests.utils import Case

SIG = Signature({"task": "TASK",
                 "args": ("A1", ),
                 "kwargs": {"K1": "V1"},
                 "options": {"task_id": "TASK_ID"},
                 "subtask_type": ""})


@task
def add(x, y):
    return x + y


@task
def mul(x, y):
    return x * y


@task
def div(x, y):
    return x / y
Example 25
 def test_reduce(self):
     x = Signature('TASK', (2, 4), app=self.app)
     fun, args = x.__reduce__()
     assert fun(*args) == x
Example 26
 def test_json(self):
     x = Signature('TASK', link=Signature('B', app=self.app), app=self.app)
     assert x.__json__() == dict(x)
Example 27
 def test_discard(t, _transaction, _stage_record):
     sig = Signature("mock.task", args=(10,), options={"task_id": "1"})
     t.dispatcher.add(sig)
     t.dispatcher.discard("1")
     t.assertTupleEqual((), t.dispatcher.staged)
     t.storage.mdelete.assert_called_once_with("1")
Example 28
 def test_set(self):
     assert Signature('TASK', x=1).set(task_id='2').options == {
         'x': 1,
         'task_id': '2',
     }
Example 29
 def get_update_task(self):
     return Signature(self.UPDATE_TASK,
                      args=(self.pk, self.version),
                      immutable=True)
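
The examples above share one pattern: a Signature is built by task name, its options are adjusted with set() or replace(), and it is dispatched, directly or inside a chain/group, with apply_async(). The following is a minimal, self-contained sketch of that pattern; the app, the 'demo.add' task, and the eager configuration are illustrative assumptions, not taken from any example above.

from celery import Celery
from celery.canvas import Signature, chain

# Illustrative setup: eager mode runs tasks in-process, so no broker or worker is needed.
app = Celery('demo')
app.conf.task_always_eager = True


@app.task(name='demo.add')
def add(x, y):
    return x + y


# Build a signature by task name, as the examples above do.
sig = Signature('demo.add', args=(2, 3), app=app)
sig.set(task_id='demo-1')            # set() merges options in place (cf. test_set)
other = sig.replace(args=(10, 20))   # replace() returns a modified copy (cf. test_replace)

# Compose into a chain; each task's result is passed on to the next signature.
result = chain(sig, Signature('demo.add', args=(100,), app=app)).apply_async()
print(result.get())   # add(2, 3) -> 5, then add(5, 100) -> 105
print(other.args)     # (10, 20)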