Example 1
 def test_getattr__setattr(self):
     x = AttributeDict({'foo': 'bar'})
     assert x['foo'] == 'bar'
     with pytest.raises(AttributeError):
         x.bar
     x.bar = 'foo'
     assert x['bar'] == 'foo'
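
The test above pins down the AttributeDict contract: keys are readable and writable as attributes, and a missing key surfaces as AttributeError rather than KeyError. A minimal sketch of a dict subclass with that behaviour (illustrative only, not Celery's actual celery.utils.collections.AttributeDict):

class AttrDictSketch(dict):
    """Dict whose keys can also be read and written as attributes."""

    def __getattr__(self, key):
        # Only called when normal attribute lookup fails, so dict methods keep
        # working; a missing key becomes AttributeError, as the test expects.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(f'no attribute {key!r}')

    def __setattr__(self, key, value):
        # Attribute assignment writes straight through to the mapping.
        self[key] = value

x = AttrDictSketch({'foo': 'bar'})
assert x.foo == 'bar'
x.bar = 'foo'
assert x['bar'] == 'foo'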
Example 2
 def test_getattr__setattr(self):
     x = AttributeDict({'foo': 'bar'})
     self.assertEqual(x['foo'], 'bar')
     with self.assertRaises(AttributeError):
         x.bar
     x.bar = 'foo'
     self.assertEqual(x['bar'], 'foo')
Example 3
    def test_process_initializer(self, _signals, set_mp_process_title,
                                 restore_logging):
        from celery import signals
        from celery._state import _tls
        from celery.concurrency.prefork import WORKER_SIGIGNORE, WORKER_SIGRESET, process_initializer
        on_worker_process_init = Mock()
        signals.worker_process_init.connect(on_worker_process_init)

        with self.Celery(loader=self.Loader) as app:
            app.conf = AttributeDict(DEFAULTS)
            process_initializer(app, 'awesome.worker.com')
            _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
            _signals.reset.assert_any_call(*WORKER_SIGRESET)
            assert app.loader.init_worker.call_count
            on_worker_process_init.assert_called()
            assert _tls.current_app is app
            set_mp_process_title.assert_called_with(
                'celeryd',
                hostname='awesome.worker.com',
            )

            with patch('celery.app.trace.setup_worker_optimizations') as S:
                os.environ['FORKED_BY_MULTIPROCESSING'] = '1'
                try:
                    process_initializer(app, 'luke.worker.com')
                    S.assert_called_with(app, 'luke.worker.com')
                finally:
                    os.environ.pop('FORKED_BY_MULTIPROCESSING', None)

            os.environ['CELERY_LOG_FILE'] = 'worker%I.log'
            app.log.setup = Mock(name='log_setup')
            try:
                process_initializer(app, 'luke.worker.com')
            finally:
                os.environ.pop('CELERY_LOG_FILE', None)
Example 4
 def test_conf_raises_KeyError(self):
     self.app.conf = AttributeDict({
         'result_serializer': 'json',
         'result_cache_max': 1,
         'result_expires': None,
         'accept_content': ['json'],
     })
     self.Backend(app=self.app)
Example 5
    def test_pdeath_sig(self, _set_pdeathsig, set_mp_process_title, restore_logging):
        from celery import signals
        on_worker_process_init = Mock()
        signals.worker_process_init.connect(on_worker_process_init)
        from celery.concurrency.prefork import process_initializer

        with self.Celery(loader=self.Loader) as app:
            app.conf = AttributeDict(DEFAULTS)
            process_initializer(app, 'awesome.worker.com')
        _set_pdeathsig.assert_called_once_with('SIGKILL')
Example 6
 def __init__(self, c):
     self.c = c
     self.hostname = c.hostname
     self.node = c.app.control.mailbox.Node(
         safe_str(c.hostname),
         handlers=control.Panel.data,
         state=AttributeDict(
             app=c.app,
             hostname=c.hostname,
             consumer=c,
             tset=pass1 if c.controller.use_eventloop else set),
     )
     self._forward_clock = self.c.app.clock.forward
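
The AttributeDict assembled here becomes the state object handed to the remote-control handlers registered in control.Panel.data. For context, a custom command written in the style of Celery's documented control_command decorator receives that state as its first argument; the handler below is illustrative and not part of the snippet above:

from celery.worker.control import control_command

@control_command(
    args=[('n', int)],
    signature='[N=1]',  # used for command-line help
)
def increase_prefetch_count(state, n=1):
    # state.consumer is the Consumer stored in the AttributeDict above.
    state.consumer.qos.increment_eventually(n)
    return {'ok': 'prefetch count incremented'}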
Example 7
    def test_process_initializer(self, set_mp_process_title, _signals):
        with mock.restore_logging():
            from celery import signals
            from celery._state import _tls
            from celery.concurrency.prefork import (
                process_initializer, WORKER_SIGRESET, WORKER_SIGIGNORE,
            )
            on_worker_process_init = Mock()
            signals.worker_process_init.connect(on_worker_process_init)

            def Loader(*args, **kwargs):
                loader = Mock(*args, **kwargs)
                loader.conf = {}
                loader.override_backends = {}
                return loader

            with self.Celery(loader=Loader) as app:
                app.conf = AttributeDict(DEFAULTS)
                process_initializer(app, 'awesome.worker.com')
                _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
                _signals.reset.assert_any_call(*WORKER_SIGRESET)
                self.assertTrue(app.loader.init_worker.call_count)
                on_worker_process_init.assert_called()
                self.assertIs(_tls.current_app, app)
                set_mp_process_title.assert_called_with(
                    'celeryd', hostname='awesome.worker.com',
                )

                with patch('celery.app.trace.setup_worker_optimizations') as S:
                    os.environ['FORKED_BY_MULTIPROCESSING'] = '1'
                    try:
                        process_initializer(app, 'luke.worker.com')
                        S.assert_called_with(app, 'luke.worker.com')
                    finally:
                        os.environ.pop('FORKED_BY_MULTIPROCESSING', None)

                os.environ['CELERY_LOG_FILE'] = 'worker%I.log'
                app.log.setup = Mock(name='log_setup')
                try:
                    process_initializer(app, 'luke.worker.com')
                finally:
                    os.environ.pop('CELERY_LOG_FILE', None)
Example 8
 def test_on_start_proc_alive_timeout_custom(self, __forking_enable):
     app = Mock(conf=AttributeDict(DEFAULTS))
     app.conf.worker_proc_alive_timeout = 8.0
     pool = TaskPool(4, app=app)
     pool.on_start()
     assert pool._pool._proc_alive_timeout == 8.0
Example 9
 def test_on_start_proc_alive_timeout_default(self, __forking_enable):
     app = Mock(conf=AttributeDict(DEFAULTS))
     pool = TaskPool(4, app=app)
     pool.on_start()
     assert pool._pool._proc_alive_timeout == 4.0
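
Both timeout tests read the same setting: TaskPool.on_start forwards worker_proc_alive_timeout from the app configuration to the underlying pool, with 4.0 seconds as the default in DEFAULTS. A minimal configuration sketch (the app name is arbitrary):

from celery import Celery

app = Celery('proj')
# Allow forked pool processes up to 8 seconds to report they are alive,
# instead of the 4-second default exercised by the test above.
app.conf.worker_proc_alive_timeout = 8.0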
Example 10
 def create_state(self, **kwargs):
     kwargs.setdefault('app', self.app)
     kwargs.setdefault('hostname', hostname)
     kwargs.setdefault('tset', set)
     return AttributeDict(kwargs)
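
A hypothetical call site for this factory, inside the same test class (Mock and the module-level hostname are assumed to be in scope, just as in the factory itself):

 def test_state_defaults(self):
     state = self.create_state(consumer=Mock())
     assert state.app is self.app       # filled in by setdefault
     assert state.hostname == hostname  # module-level default
     assert state.tset is set
     assert state.consumer is not None  # explicit kwarg passed through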
Example 11
    def _delta_to_next(self, last_run_at, next_hour, next_minute, next_second):
        """Find next delta.

        Takes a :class:`~datetime.datetime` of the last run, plus the next
        hour, minute and second, and returns a :class:`~celery.utils.time.ffwd`
        for the next scheduled day and time.

        Only called when ``day_of_month`` and/or ``month_of_year``
        cronspec is specified to further limit scheduled task execution.
        """
        datedata = AttributeDict(year=last_run_at.year)
        days_of_month = sorted(self.day_of_month)
        months_of_year = sorted(self.month_of_year)

        def day_out_of_range(year, month, day):
            try:
                datetime(year=year, month=month, day=day)
            except ValueError:
                return True
            return False

        def roll_over():
            for _ in range(2000):
                flag = (datedata.dom == len(days_of_month) or day_out_of_range(
                    datedata.year, months_of_year[datedata.moy],
                    days_of_month[datedata.dom]) or (self.maybe_make_aware(
                        datetime(datedata.year, months_of_year[datedata.moy],
                                 days_of_month[datedata.dom])) < last_run_at))

                if flag:
                    datedata.dom = 0
                    datedata.moy += 1
                    if datedata.moy == len(months_of_year):
                        datedata.moy = 0
                        datedata.year += 1
                else:
                    break
            else:
                # Tried 2000 times, we're most likely in an infinite loop
                raise RuntimeError('unable to rollover, '
                                   'time specification is probably invalid')

        if last_run_at.month in self.month_of_year:
            datedata.dom = bisect(days_of_month, last_run_at.day)
            datedata.moy = bisect_left(months_of_year, last_run_at.month)
        else:
            datedata.dom = 0
            datedata.moy = bisect(months_of_year, last_run_at.month)
            if datedata.moy == len(months_of_year):
                datedata.moy = 0
        roll_over()

        while 1:
            th = datetime(year=datedata.year,
                          month=months_of_year[datedata.moy],
                          day=days_of_month[datedata.dom])
            if th.isoweekday() % 7 in self.day_of_week:
                break
            datedata.dom += 1
            roll_over()

        return ffwd(year=datedata.year,
                    month=months_of_year[datedata.moy],
                    day=days_of_month[datedata.dom],
                    hour=next_hour,
                    minute=next_minute,
                    second=next_second,
                    microsecond=0)
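
The seeding of datedata.dom and datedata.moy above relies on the asymmetry between bisect and bisect_left: the day index points just past the day that last ran, while the month index keeps pointing at the current month's slot. A small standard-library illustration with hypothetical cron-spec values:

from bisect import bisect, bisect_left

days_of_month = sorted({1, 15})       # [1, 15]
months_of_year = sorted({3, 9})       # [3, 9]

# Last run on March 15:
dom = bisect(days_of_month, 15)       # 2 -> past the end; roll_over() advances the month
moy = bisect_left(months_of_year, 3)  # 0 -> stay in March's slot
assert (dom, moy) == (2, 0)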