Example #1
 def test_link_on_scalar(self):
     x = Signature('TASK', link=Signature('B'))
     self.assertTrue(x.options['link'])
     x.link(Signature('C'))
     self.assertIsInstance(x.options['link'], list)
     self.assertIn(Signature('B'), x.options['link'])
     self.assertIn(Signature('C'), x.options['link'])
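
A minimal standalone sketch of the behaviour this test pins down, assuming only that Celery is installed; the task names are placeholders:

from celery.canvas import Signature

# options['link'] starts out as a single Signature...
sig = Signature('demo.task', link=Signature('demo.callback_b'))

# ...and linking a second callback promotes it to a list holding both.
sig.link(Signature('demo.callback_c'))
assert isinstance(sig.options['link'], list)
assert Signature('demo.callback_b') in sig.options['link']
assert Signature('demo.callback_c') in sig.options['link']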
Example #2
 def test_replace(self):
     x = Signature('TASK', ('A',), {})
     self.assertTupleEqual(x.replace(args=('B', )).args, ('B', ))
     self.assertDictEqual(x.replace(kwargs={'FOO': 'BAR'}).kwargs,
             {'FOO': 'BAR'})
     self.assertDictEqual(x.replace(options={'task_id': '123'}).options,
             {'task_id': '123'})
Example #3
 def test_link_on_scalar(self):
     x = Signature('TASK', link=Signature('B'))
     assert x.options['link']
     x.link(Signature('C'))
     assert isinstance(x.options['link'], list)
     assert Signature('B') in x.options['link']
     assert Signature('C') in x.options['link']
Example #4
 def test_replace(self):
     x = Signature("TASK", ("A"), {})
     self.assertTupleEqual(x.replace(args=("B", )).args, ("B", ))
     self.assertDictEqual(x.replace(kwargs={"FOO": "BAR"}).kwargs,
             {"FOO": "BAR"})
     self.assertDictEqual(x.replace(options={"task_id": "123"}).options,
             {"task_id": "123"})
Example #5
 def test_replace(self):
     x = Signature('TASK', ('A',), {})
     assert x.replace(args=('B',)).args == ('B',)
     assert x.replace(kwargs={'FOO': 'BAR'}).kwargs == {
         'FOO': 'BAR',
     }
     assert x.replace(options={'task_id': '123'}).options == {
         'task_id': '123',
     }
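
A hedged aside on replace (task name is a placeholder): it clones the signature, so the original stays untouched:

from celery.canvas import Signature

original = Signature('demo.task', args=('A',), kwargs={'x': 1})
changed = original.replace(args=('B',), options={'task_id': '123'})

assert original.args == ('A',)               # the original is not mutated
assert changed.args == ('B',)
assert changed.options == {'task_id': '123'}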
Example #6
def postcommit_after_request(response, base_status_error_code=500):
    if response.status_code >= base_status_error_code:
        _local.postcommit_queue = OrderedDict()
        _local.postcommit_celery_queue = OrderedDict()
        return response
    try:
        if postcommit_queue():
            number_of_threads = 30  # one db connection per greenlet, let's share
            pool = Pool(number_of_threads)
            for func in postcommit_queue().values():
                pool.spawn(func)
            pool.join(timeout=5.0, raise_error=True)  # 5 second timeout and reraise exceptions

        if postcommit_celery_queue():
            if settings.USE_CELERY:
                for task_dict in postcommit_celery_queue().values():
                    task = Signature.from_dict(task_dict)
                    task.apply_async()
            else:
                for task in postcommit_celery_queue().values():
                    task()

    except AttributeError as ex:
        if not settings.DEBUG_MODE:
            logger.error('Post commit task queue not initialized: {}'.format(ex))
    return response
Example #7
def setup_cluster(task, *args, **kwargs):
    cluster = kwargs['cluster']

    if '_id' in cluster:
        task.taskflow.logger.info(
            'We are using an existing cluster: %s' % cluster['name'])
    else:
        task.taskflow.logger.info('We are creating an EC2 cluster.')
        task.logger.info('Cluster name %s' % cluster['name'])
        kwargs['machine'] = cluster.get('machine')
        profile = kwargs.get('profile')
        cluster = create_ec2_cluster(
            task, cluster, profile, kwargs['image_spec'])
        task.logger.info('Cluster started.')

    # Call any follow on task
    if 'next' in kwargs:
        kwargs['cluster'] = cluster
        next = Signature.from_dict(kwargs['next'])

        if next.task == 'celery.chain':
            # If we are dealing with a chain we want to update the arg and
            # kwargs passed into the chain.
            first_task = next.kwargs['tasks'][0]
            if first_task:
                if args:
                    first_task.args = tuple(args) + tuple(first_task.args)

                if kwargs:
                    first_task.kwargs = dict(first_task.kwargs, **kwargs)

        next.delay(*args, **kwargs)
Example #8
        def prepare_steps(self, args, tasks):
            steps = deque(tasks)
            next_step = prev_task = prev_res = None
            tasks, results = [], []
            i = 0
            while steps:
                # First task gets partial args from the chain.
                task = maybe_subtask(steps.popleft())
                task = task.clone() if i else task.clone(args)
                res = task.freeze()
                i += 1

                if isinstance(task, group):
                    # automatically upgrade group(..) | s to chord(group, s)
                    try:
                        next_step = steps.popleft()
                        # for chords we freeze by pretending it's a normal
                        # task instead of a group.
                        res = Signature.freeze(task)
                        task = chord(task, body=next_step, task_id=res.task_id)
                    except IndexError:
                        pass  # no callback, so keep as group
                if prev_task:
                    # link previous task to this task.
                    prev_task.link(task)
                    # set the results parent attribute.
                    res.parent = prev_res

                if not isinstance(prev_task, chord):
                    results.append(res)
                    tasks.append(task)
                prev_task, prev_res = task, res

            return tasks, results
Example #9
        def prepare_steps(self, args, tasks):
            app = self.app
            steps = deque(tasks)
            next_step = prev_task = prev_res = None
            tasks, results = [], []
            i = 0
            while steps:
                # First task gets partial args from the chain.
                task = maybe_signature(steps.popleft(), app=app)
                task = task.clone() if i else task.clone(args)
                res = task.freeze()
                i += 1

                if isinstance(task, group):
                    task = maybe_unroll_group(task)
                if isinstance(task, chain):
                    # splice the chain
                    steps.extendleft(reversed(task.tasks))
                    continue

                elif isinstance(task, group) and steps and \
                        not isinstance(steps[0], group):
                    # automatically upgrade group(..) | s to chord(group, s)
                    try:
                        next_step = steps.popleft()
                        # for chords we freeze by pretending it's a normal
                        # task instead of a group.
                        res = Signature.freeze(next_step)
                        task = chord(task, body=next_step, task_id=res.task_id)
                    except IndexError:
                        pass  # no callback, so keep as group
                if prev_task:
                    # link previous task to this task.
                    prev_task.link(task)
                    # set the results parent attribute.
                    if not res.parent:
                        res.parent = prev_res

                if not isinstance(prev_task, chord):
                    results.append(res)
                    tasks.append(task)
                prev_task, prev_res = task, res

            print(tasks)

            return tasks, results
Example #10
        def prepare_steps(self, args, tasks):
            app = self.app
            steps = deque(tasks)
            next_step = prev_task = prev_res = None
            tasks, results = [], []
            i = 0
            while steps:
                # First task gets partial args from the chain.
                task = maybe_signature(steps.popleft(), app=app)
                task = task.clone() if i else task.clone(args)
                res = task.freeze()
                i += 1

                if isinstance(task, group):
                    task = maybe_unroll_group(task)
                if isinstance(task, chain):
                    # splice the chain
                    steps.extendleft(reversed(task.tasks))
                    continue

                elif isinstance(task, group) and steps and \
                        not isinstance(steps[0], group):
                    # automatically upgrade group(..) | s to chord(group, s)
                    try:
                        next_step = steps.popleft()
                        # for chords we freeze by pretending it's a normal
                        # task instead of a group.
                        res = Signature.freeze(next_step)
                        task = chord(task, body=next_step, task_id=res.task_id)
                    except IndexError:
                        pass  # no callback, so keep as group
                if prev_task:
                    # link previous task to this task.
                    prev_task.link(task)
                    # set the results parent attribute.
                    if not res.parent:
                        res.parent = prev_res

                if not isinstance(prev_task, chord):
                    results.append(res)
                    tasks.append(task)
                prev_task, prev_res = task, res

            return tasks, results
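
The comments in prepare_steps describe the automatic upgrade of a group followed by another step into a chord. A small hedged sketch of the same idea from the caller's side, assuming a recent Celery (4.x/5.x) where group.__or__ builds the chord directly; older versions performed the upgrade inside prepare_steps as shown above. Task names are placeholders:

from celery.canvas import Signature, chord, group

header = group(Signature('demo.add'), Signature('demo.add'))
callback = Signature('demo.tsum')

# Piping a group into a single callback yields a chord.
canvas = header | callback
assert isinstance(canvas, chord)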
Example #11
def setup_cluster(task, *args, **kwargs):
    cluster = kwargs['cluster']

    if '_id' in cluster:
        task.taskflow.logger.info('We are using an existing cluster: %s' % cluster['name'])
    else:
        task.taskflow.logger.info('We are creating an EC2 cluster.')
        task.logger.info('Cluster name %s' % cluster['name'])
        kwargs['machine'] = cluster.get('machine')
        ami = kwargs.get('ami')
        profile = kwargs.get('profile')
        cluster = create_ec2_cluster(task, cluster, profile, ami)
        task.logger.info('Cluster started.')

    # Call any follow on task
    if 'next' in kwargs:
        kwargs['cluster'] = cluster
        next = Signature.from_dict(kwargs['next'])
        next.delay(*args, **kwargs)
Example #12
    def apply_async(self, args=(), kwargs={}, route_name=None, **options):
        """Apply this task asynchronously.

        Arguments:
            args (Tuple): Partial args to be prepended to the existing args.
            kwargs (Dict): Partial kwargs to be merged with existing kwargs.
            options (Dict): Partial options to be merged
                with existing options.

        Returns:
            ~@AsyncResult: promise of future evaluation.

        See also:
            :meth:`~@Task.apply_async` and the :ref:`guide-calling` guide.
        """
        jobid = CelerySignature.apply_async(self, args, kwargs, route_name,
                                            **options)
        task = TaskResult.task_pending(str(jobid))
        logger.debug('Create new task: %s' % task)
        return jobid
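
The docstring above (inherited from Celery's Signature) says call-time args are prepended to the signature's partial args. A minimal hedged sketch with a throwaway eager app; the names are made up and the project-specific TaskResult bookkeeping is omitted:

from celery import Celery

app = Celery('demo')
app.conf.task_always_eager = True   # run tasks in-process for the sketch

@app.task(name='demo.add')
def add(x, y):
    return x + y

partial = add.s(2)                       # bind one positional argument
result = partial.apply_async(args=(3,))  # call-time args are prepended: add(3, 2)
assert result.get() == 5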
Example #13
 def inner(*args, **kwargs):
     taskset = kwargs.pop("_taskset", None)
     rv = f(*args, **kwargs)
     if taskset is not None:
         done = False
         with transaction.atomic():
             taskset_id = taskset["taskset_id"]
             sync_row = TaskSetMeta.objects.select_for_update().filter(
                 id=taskset_id).all()
             if sync_row:
                 assert len(sync_row) == 1
                 sync_row = sync_row[0]
                 sync_row.count -= 1
                 sync_row.save()
                 if sync_row.count == 0:
                     logger.info("Finished taskset id %i" % taskset_id)
                     done = True
                 else:
                     logger.info("Taskset %i has %i tasks remaining" % (taskset_id, sync_row.count))
         if done:
             callback = Signature.from_dict(taskset["callback"])
             callback.apply_async()
     return rv
Example #14
 def inner(*args, **kwargs):
     taskset = kwargs.pop("_taskset", None)
     rv = f(*args, **kwargs)
     if taskset is not None:
         done = False
         with transaction.atomic():
             taskset_id = taskset["taskset_id"]
             sync_row = TaskSetMeta.objects.select_for_update().filter(
                 id=taskset_id).all()
             if sync_row:
                 assert len(sync_row) == 1
                 sync_row = sync_row[0]
                 sync_row.count -= 1
                 sync_row.save()
                 if sync_row.count == 0:
                     logger.info("Finished taskset id %i" % taskset_id)
                     done = True
                 else:
                     logger.info("Taskset %i has %i tasks remaining" %
                                 (taskset_id, sync_row.count))
         if done:
             callback = Signature.from_dict(taskset["callback"])
             callback.apply_async()
     return rv
Example #15
    def test_tpc_finish_multiple(t, _transaction, _stage, _commit):
        sig1 = Signature("mock.task", args=(10,), options={"task_id": "1"})
        sig2 = Signature("mock.task", args=(11,), options={"task_id": "2"})
        apply1 = Mock()
        sig1.apply_async = apply1
        apply2 = Mock()
        sig2.apply_async = apply2
        tx = Mock()

        t.dispatcher.add(sig1)
        t.dispatcher.add(sig2)

        t.dispatcher.tpc_finish(tx)
        t.assertTupleEqual((), t.dispatcher.staged)
        t.assertListEqual([], t.dispatcher._signatures)
        t.storage.mdelete.assert_not_called()
        apply1.assert_called_once_with()
        apply2.assert_called_once_with()
        calls = [call(t.storage, sig1), call(t.storage, sig2)]
        _commit.assert_has_calls(calls)
Example #16
 def test_set(self):
     assert Signature('TASK', x=1).set(task_id='2').options == {
         'x': 1,
         'task_id': '2',
     }
Example #17
 def test_json(self):
     x = Signature('TASK', link=Signature('B', app=self.app), app=self.app)
     self.assertDictEqual(x.__json__(), dict(x))
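
A hedged aside on why __json__ can simply return dict(x): a signature is a plain dict underneath, so its dict form is already JSON-friendly (task name is a placeholder):

import json
from celery.canvas import Signature

sig = Signature('demo.task', args=(1, 2), kwargs={'x': 1})
print(json.dumps(dict(sig)))   # args, kwargs and options serialize directly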
Example #18
 def test_reduce(self):
     x = Signature('TASK', (2, 4), app=self.app)
     fun, args = x.__reduce__()
     assert fun(*args) == x
Example #19
 def test_json(self):
     x = Signature('TASK', link=Signature('B', app=self.app), app=self.app)
     assert x.__json__() == dict(x)
Example #20
def set_label(sig: Signature, label):
    sig.set(label=label)
Example #21
def setup_cluster(task, *args, **kwargs):
    cluster = kwargs['cluster']
    profile = kwargs.get('profile')
    volume = kwargs.get('volume')
    new = False

    if '_id' in cluster:
        task.taskflow.logger.info('We are using an existing cluster: %s' %
                                  cluster['name'])
    else:
        new = True
        task.taskflow.logger.info('We are creating an EC2 cluster.')
        task.logger.info('Cluster name %s' % cluster['name'])
        kwargs['machine'] = cluster.get('machine')

        if volume:
            config = cluster.setdefault('config', {})
            config['jobOutputDir'] = '/data'

        # Create the model in Girder
        cluster = create_ec2_cluster(task, cluster, profile,
                                     kwargs['image_spec'])

        # Now launch the cluster
        cluster = launch_ec2_cluster(task, cluster, profile)

        task.logger.info('Cluster started.')

    if volume and '_id' in volume:
        task.taskflow.logger.info('We are using an existing volume: %s' %
                                  volume['name'])
    elif volume:
        task.taskflow.logger.info('We are creating a new volume: "%s"' %
                                  volume['name'])
        volume = create_volume(task, volume, profile)

    # Now provision
    if new:
        provision_params = {}

        girder_token = task.taskflow.girder_token
        check_girder_cluster_status(cluster, girder_token, 'provisioning')

        # attach volume
        if volume:
            volume = _attach_volume(task, profile, volume, cluster)
            path = volume.get('path')
            if path:
                provision_params['master_nfs_exports_extra'] = [path]

        cluster = provision_ec2_cluster(task, cluster, profile,
                                        provision_params)

    # Call any follow on task
    if 'next' in kwargs:
        kwargs['cluster'] = cluster
        next = Signature.from_dict(kwargs['next'])

        if next.task == 'celery.chain':
            # If we are dealing with a chain we want to update the arg and
            # kwargs passed into the chain.
            first_task = next.kwargs['tasks'][0]
            if first_task:
                if args:
                    first_task.args = tuple(args) + tuple(first_task.args)

                if kwargs:
                    first_task.kwargs = dict(first_task.kwargs, **kwargs)

        next.delay(*args, **kwargs)
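
A hedged sketch of the structure this code relies on: a serialized chain round-trips through Signature.from_dict as a signature named 'celery.chain' whose kwargs carry the member signatures (task names are placeholders):

from celery.canvas import Signature, chain

flow = chain(Signature('demo.first', args=(1,)), Signature('demo.second'))
restored = Signature.from_dict(dict(flow))

assert restored.task == 'celery.chain'
first = restored.kwargs['tasks'][0]
assert first.args == (1,)        # the slot that setup_cluster patches above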
Example #22
def postcommit_celery_task_wrapper(queue):
    # chain.apply calls the tasks synchronously without re-enqueuing each one
    # http://stackoverflow.com/questions/34177131/how-to-solve-python-celery-error-when-using-chain-encodeerrorruntimeerrormaxi?answertab=votes#tab-top
    # celery serialized signatures into dictionaries, so we need to deserialize here
    # https://sentry.cos.io/sentry/osf-iy/issues/289209/
    chain([Signature.from_dict(task_dict) for task_dict in queue.values()]).apply()
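
A hedged sketch of the two ideas the comments above rely on, with a throwaway app and a placeholder task; in the real code the queue holds serialized dicts rather than live signatures:

from celery import Celery
from celery.canvas import Signature, chain

app = Celery('demo')

@app.task(name='demo.append')
def append(acc, item):
    return acc + [item]

# Queued signatures arrive as plain dicts and are rebuilt with from_dict...
queued = [dict(append.s([], item='x')), dict(append.s(item='y'))]
sigs = [Signature.from_dict(d, app=app) for d in queued]

# ...and chain(...).apply() runs the whole chain synchronously, in-process.
result = chain(sigs).apply()
assert result.get() == ['x', 'y']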
Example #23
from celery.canvas import Signature, maybe_unroll_group
from celery.result import EagerResult

from celery.tests.case import (
    AppCase,
    ContextMock,
    MagicMock,
    Mock,
    depends_on_current_app,
)

SIG = Signature({
    'task': 'TASK',
    'args': ('A1', ),
    'kwargs': {
        'K1': 'V1'
    },
    'options': {
        'task_id': 'TASK_ID'
    },
    'subtask_type': ''
})


class test_maybe_unroll_group(AppCase):
    def test_when_no_len_and_no_length_hint(self):
        g = MagicMock(name='group')
        g.tasks.__len__.side_effect = TypeError()
        g.tasks.__length_hint__ = Mock()
        g.tasks.__length_hint__.return_value = 0
        self.assertIs(maybe_unroll_group(g), g)
        g.tasks.__length_hint__.side_effect = AttributeError()
Example #24
from __future__ import absolute_import
from __future__ import with_statement

from mock import Mock

from celery import current_app, task
from celery.canvas import Signature, chain, group, chord, subtask
from celery.result import EagerResult

from celery.tests.utils import Case

SIG = Signature({"task": "TASK",
                 "args": ("A1", ),
                 "kwargs": {"K1": "V1"},
                 "options": {"task_id": "TASK_ID"},
                 "subtask_type": ""})


@task
def add(x, y):
    return x + y


@task
def mul(x, y):
    return x * y


@task
def div(x, y):
    return x / y
Example #25
 def test_reduce(self):
     x = Signature('TASK', (2, 4), app=self.app)
     fun, args = x.__reduce__()
     self.assertEqual(fun(*args), x)
Example #26
def is_ready(result):
    return result.ready()


if __name__ == "__main__":
    with concurrent.futures.ThreadPoolExecutor() as executor:
        get_mem_usage()
        for _ in range(10):
            for _ in range(10):
                args = ({
                    'foo': gen_big_str(),
                    'bar': [{
                        'bla': gen_big_str()
                    }]
                }, )
                if LEAK:
                    future = executor.submit(app.send_task,
                                             'hello',
                                             args=args,
                                             chain=[Signature('hello')])
                    result = future.result()
                    while not executor.submit(is_ready, result).result():
                        time.sleep(0.001)
                else:
                    result = app.send_task('hello',
                                           args=args,
                                           chain=[Signature('hello')])
                assert result.get()
            gc.collect()
            get_mem_usage()
Example #27
 def test_set(self):
     self.assertDictEqual(
         Signature('TASK', x=1).set(task_id='2').options,
         {'x': 1, 'task_id': '2'},
     )
Example #28
 def test_json(self):
     x = Signature('TASK', link=Signature('B', app=self.app), app=self.app)
     assert x.__json__() == dict(x)
Example #29
 def test_reduce(self):
     x = Signature('TASK', (2, 4), app=self.app)
     fun, args = x.__reduce__()
     self.assertEqual(fun(*args), x)
Example #30
 def test_reduce(self):
     x = Signature('TASK', (2, 4), app=self.app)
     fun, args = x.__reduce__()
     assert fun(*args) == x
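
A hedged aside on what __reduce__ buys in practice: signatures survive pickling, which is how they travel inside results and message headers (task name is a placeholder):

import pickle
from celery.canvas import Signature

sig = Signature('demo.task', args=(2, 4))
assert pickle.loads(pickle.dumps(sig)) == sig   # equality is by dict contents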
Example #31
def update_analysis(analysis_id):
    """ Launches async job to update analysis """
    task = Signature("analysis.tasks.analysis_update_tasks.create_and_launch_analysis_tasks", args=(analysis_id,))
    task.apply_async()
Example #32
 def test_set(self):
     self.assertDictEqual(Signature("TASK", x=1).set(task_id="2").options,
             {"x": 1, "task_id": "2"})
Example #33
 def get_update_task(self):
     return Signature(self.UPDATE_TASK,
                      args=(self.pk, self.version),
                      immutable=True)
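
A hedged note on the immutable=True flag used here: it tells Celery not to prepend a parent task's result to this signature's arguments, so the task always runs with exactly the args given (names below are placeholders; task.si(...) is the usual shortcut):

from celery.canvas import Signature

update = Signature('demo.update_index', args=(42, 'v7'), immutable=True)
assert update.immutable

# As a callback, the parent's return value will not be passed along; the
# task still runs as demo.update_index(42, 'v7').
parent = Signature('demo.compute')
parent.link(update)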
Example #34
 def test_json(self):
     x = Signature('TASK', link=Signature('B', app=self.app), app=self.app)
     self.assertDictEqual(x.__json__(), dict(x))
Example #35
 def test_call(self):
     x = Signature('foo', (1, 2), {'arg1': 33}, app=self.app)
     x.type = Mock(name='type')
     x(3, 4, arg2=66)
     x.type.assert_called_with(3, 4, 1, 2, arg1=33, arg2=66)
Example #36
 def test_call(self):
     x = Signature('foo', (1, 2), {'arg1': 33}, app=self.app)
     x.type = Mock(name='type')
     x(3, 4, arg2=66)
     x.type.assert_called_with(3, 4, 1, 2, arg1=33, arg2=66)
Example #37
    async def pdf_declaration_pipeline(self, exception):
        if not exception:
            if self.pdf_declaration_url_lst:
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Downloading pdf:declaration body, total {len(self.pdf_declaration_url_lst)}'
                )
                tasks = [
                    self.get_pdf_bin(u) for u in self.pdf_declaration_url_lst
                ]

                # run the downloads concurrently or sequentially
                if self.pdf_concurrence:
                    await asyncio.wait(tasks)
                else:
                    for f in tasks:
                        await f

            if self.pdf_declaration_bin_lst:

                tasks = [
                    Signature(current_app.config.PDF_TASK, [
                        pdbs.body,
                        pdbs.declarationMeta._asdict(), {
                            'province': self.task_params['province'],
                            'company_name': self.task_params['company_name']
                        }
                    ],
                              queue=current_app.config.PDF_QUEUE,
                              app=current_app.app)
                    for pdbs in self.pdf_declaration_bin_lst
                ]
                job = group(*tasks)
                group_result = job.apply_async()
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Submitted pdf:declaration tasks, total {len(self.pdf_declaration_bin_lst)} task_id'
                )
                while not group_result.ready():
                    await asyncio.sleep(.1)
                success_results = [
                    async_result.info for async_result in group_result.results
                    if async_result.state == 'SUCCESS'
                ]
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Got results for submitted pdf:declaration tasks, {len(success_results)} succeeded')
                for result in success_results:
                    if result and (not result.get('code')):
                        declaration = json_data_mapper(result, DeclarationInfo)
                        self.declaration.declarationInfo.append(declaration)
                await simple_log(
                    self,
                    diy.DIY_STATUS.value,
                    message=
                    f'Declaration objects actually obtained: {len(self.declaration.declarationInfo)}'
                )
Example #38
 def test_discard(t, _transaction, _stage_record):
     sig = Signature("mock.task", args=(10,), options={"task_id": "1"})
     t.dispatcher.add(sig)
     t.dispatcher.discard("1")
     t.assertTupleEqual((), t.dispatcher.staged)
     t.storage.mdelete.assert_called_once_with("1")
Example #39
def set_execution_options(sig: Signature, **options):
    """Set arbitrary executions options in every task in the :attr:`sig`"""
    try:
        [task.set(**options) for task in sig.tasks]
    except AttributeError:
        sig.set(**options)
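
A hedged usage sketch for the helper above: a plain signature falls back to sig.set, while a canvas object with a .tasks attribute (such as a group) gets the options applied to every member (task names are placeholders):

from celery.canvas import Signature, group

single = Signature('demo.task_a')
set_execution_options(single, queue='slow', priority=3)
assert single.options == {'queue': 'slow', 'priority': 3}

many = group(Signature('demo.task_a'), Signature('demo.task_b'))
set_execution_options(many, queue='slow')
assert all(t.options.get('queue') == 'slow' for t in many.tasks)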