Example #1
    def test_celery_integration(self, broker_url):
        success_command = ['airflow', 'run', 'true', 'some_parameter']
        fail_command = ['airflow', 'version']

        def fake_execute_command(command):
            if command != success_command:
                raise AirflowException("fail")

        with self._prepare_app(broker_url,
                               execute=fake_execute_command) as app:
            executor = celery_executor.CeleryExecutor()
            executor.start()

            with start_worker(app=app, logfile=sys.stdout, loglevel='debug'):
                cached_celery_backend = celery_executor.execute_command.backend
                default_queue = celery_executor.celery_configuration['task_default_queue']
                task_tuples_to_send = [
                    ('success', 'fake_simple_ti', success_command,
                     default_queue, celery_executor.execute_command),
                    ('fail', 'fake_simple_ti', fail_command,
                     default_queue, celery_executor.execute_command),
                ]

                chunksize = executor._num_tasks_per_send_process(
                    len(task_tuples_to_send))
                num_processes = min(len(task_tuples_to_send),
                                    executor._sync_parallelism)

                send_pool = Pool(processes=num_processes)
                key_and_async_results = send_pool.map(
                    celery_executor.send_task_to_executor,
                    task_tuples_to_send,
                    chunksize=chunksize)

                send_pool.close()
                send_pool.join()

                for key, command, result in key_and_async_results:
                    # Only pops when enqueued successfully, otherwise keep it
                    # and expect scheduler loop to deal with it.
                    result.backend = cached_celery_backend
                    executor.running[key] = command
                    executor.tasks[key] = result
                    executor.last_state[key] = celery_states.PENDING

                executor.running['success'] = True
                executor.running['fail'] = True

                executor.end(synchronous=True)

        self.assertEqual(executor.event_buffer['success'], State.SUCCESS)
        self.assertEqual(executor.event_buffer['fail'], State.FAILED)

        self.assertNotIn('success', executor.tasks)
        self.assertNotIn('fail', executor.tasks)

        self.assertNotIn('success', executor.last_state)
        self.assertNotIn('fail', executor.last_state)
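`self._prepare_app` is not shown in this snippet. Judging by how it is called, it is presumably a context manager that builds a throwaway Celery app against the given broker and temporarily swaps `celery_executor.execute_command` for the fake. A minimal sketch of that idea; every name and detail beyond the call signature is an assumption:

import contextlib
from unittest import mock

from celery import Celery


@contextlib.contextmanager
def _prepare_app(broker_url=None, execute=None):
    # Hypothetical reconstruction: configure a test Celery app on the given
    # broker, register the fake executor function as a task, and patch it
    # into the celery_executor module for the duration of the test.
    test_config = dict(celery_executor.celery_configuration)
    test_config.update({'broker_url': broker_url})
    test_app = Celery(broker_url, config_source=test_config)
    test_execute = test_app.task(execute or celery_executor.execute_command.run)
    with mock.patch.object(celery_executor, 'execute_command', test_execute):
        yield test_app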
Example #2
    def test_celery_integration(self):
        executor = CeleryExecutor()
        executor.start()
        with start_worker(app=app, logfile=sys.stdout, loglevel='debug'):

            success_command = ['true', 'some_parameter']
            fail_command = ['false', 'some_parameter']

            executor.execute_async(key='success', command=success_command)
            # errors are propagated for some reason
            try:
                executor.execute_async(key='fail', command=fail_command)
            except Exception:
                pass
            executor.running['success'] = True
            executor.running['fail'] = True

            executor.end(synchronous=True)

        self.assertEqual(executor.event_buffer['success'], State.SUCCESS)
        self.assertEqual(executor.event_buffer['fail'], State.FAILED)

        self.assertNotIn('success', executor.tasks)
        self.assertNotIn('fail', executor.tasks)

        self.assertNotIn('success', executor.last_state)
        self.assertNotIn('fail', executor.last_state)
Example #3
def celery_session_worker(request, celery_session_app, celery_worker_pool,
                          celery_worker_parameters):
    """Session Fixture: Start worker that lives throughout test suite."""
    with worker.start_worker(celery_session_app,
                             pool=celery_worker_pool,
                             **celery_worker_parameters) as worker_context:
        yield worker_context
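This mirrors the session-scoped worker fixture shipped with Celery's pytest plugin: the worker thread lives for the whole suite, and a test just requests the fixture by name. A minimal usage sketch, where `mul` is a hypothetical task registered on `celery_session_app` elsewhere:

def test_mul(celery_session_worker):
    # The session worker runs in a background thread, so the
    # .delay()/.get() round trip completes without an external worker.
    assert mul.delay(4, 4).get(timeout=10) == 16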
Example #4
def flask_celery_worker(flask_celery_app: celery.Celery, db):  # type: ignore
    # pylint: disable=W0621
    with worker.start_worker(
        flask_celery_app, pool="solo", perform_ping_check=False,
    ) as w:
        yield w
    db.session.close_all()
Example #6
 def setUp(self):
     Organism.objects.create(
         display_name=display_name, short_name=short_name,
         tax_id=tax_id)
     organism = Organism.objects.get(short_name=short_name)
     prepare_test_fasta_file()
     self.files = test_files
     HmmerDB.objects.create(
         fasta_file=FileObject('/blast/db/clec_peptide_example_BLASTdb.fa'),
         organism=organism, is_shown=True, title=title)
     if not DEBUG:
         # headless chrome driver
         options = webdriver.ChromeOptions()
         options.add_argument('headless')
         options.add_argument('window-size=1280x800')
         self.driver = webdriver.Chrome(chrome_options=options)
     else:
         # use with header
         self.driver = webdriver.Chrome()
         # Or use different webdriver
         # self.driver = webdriver.PhantomJS()
         # self.driver = webdriver.Firefox()
         self.driver.set_window_size(1280, 800)
     # Start up celery worker for testing
     self.celery_worker = start_worker(app)
     self.celery_worker.__enter__()
Example #7
    def test_start_worker_with_exception(self):
        """Make sure that start_worker does not hang on exception"""

        with pytest.raises(NotImplementedError):
            with start_worker(app=self.app, loglevel=0):
                result = self.error_task.apply_async()
                result.get(timeout=5)
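The `error_task` applied above is not defined in the snippet; presumably it is just a registered task whose body raises, so that `result.get()` re-raises inside the `pytest.raises` block. A minimal sketch of such a task (the module-level `app` and task name are assumptions; the original is an attribute of the test class):

from celery import Celery

app = Celery('testapp')


@app.task
def error_task():
    # Raise deliberately so AsyncResult.get() propagates the error
    # back to the caller in the test.
    raise NotImplementedError('deliberate failure for the test')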
Example #8
    def setUpClass(cls):
        super().setUpClass()

        # Start up celery worker
        app.loader.import_module('celery.contrib.testing.tasks')
        cls.celery_worker = start_worker(app)
        cls.celery_worker.__enter__()
Example #9
 def setUpClass(cls):
     super().setUpClass()
     # start celery worker with test app context and in-memory context
     celery_app = make_celery(app)
     cls.celery_worker = start_worker(celery_app, perform_ping_check=False)
     # spawn celery worker
     cls.celery_worker.__enter__()
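Both setUpClass variants above enter the worker context manually, but neither snippet shows the cleanup; presumably a matching tearDownClass mirrors the call. A minimal sketch of that counterpart:

@classmethod
def tearDownClass(cls):
    # Stop the background worker before the test app/database is torn
    # down; the three Nones signal a normal, exception-free exit.
    cls.celery_worker.__exit__(None, None, None)
    super().tearDownClass()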
Example #10
    def test_celery_integration(self, broker_url):
        success_command = ['airflow', 'tasks', 'run', 'true', 'some_parameter']
        fail_command = ['airflow', 'version']

        def fake_execute_command(command):
            if command != success_command:
                raise AirflowException("fail")

        with _prepare_app(broker_url, execute=fake_execute_command) as app:
            executor = celery_executor.CeleryExecutor()
            self.assertEqual(executor.tasks, {})
            executor.start()

            with start_worker(app=app, logfile=sys.stdout, loglevel='info'):
                execute_date = datetime.now()

                default_queue = celery_executor.celery_configuration['task_default_queue']
                task_tuples_to_send = [
                    (('success', 'fake_simple_ti', execute_date, 0), None,
                     success_command, default_queue, celery_executor.execute_command),
                    (('fail', 'fake_simple_ti', execute_date, 0), None,
                     fail_command, default_queue, celery_executor.execute_command),
                ]

                # "Enqueue" them. We don't have a real SimpleTaskInstance, so directly edit the dict
                for (key, simple_ti, command, queue,
                     task) in task_tuples_to_send:  # pylint: disable=W0612
                    executor.queued_tasks[key] = (command, 1, queue, simple_ti)

                executor._process_tasks(task_tuples_to_send)

                self.assertEqual(
                    list(executor.tasks.keys()),
                    [('success', 'fake_simple_ti', execute_date, 0),
                     ('fail', 'fake_simple_ti', execute_date, 0)])
                self.assertEqual(
                    executor.event_buffer[('success', 'fake_simple_ti',
                                           execute_date, 0)][0], State.QUEUED)
                self.assertEqual(
                    executor.event_buffer[('fail', 'fake_simple_ti',
                                           execute_date, 0)][0], State.QUEUED)

                executor.end(synchronous=True)

        self.assertEqual(
            executor.event_buffer[('success', 'fake_simple_ti', execute_date,
                                   0)][0], State.SUCCESS)
        self.assertEqual(
            executor.event_buffer[('fail', 'fake_simple_ti', execute_date,
                                   0)][0], State.FAILED)

        self.assertNotIn('success', executor.tasks)
        self.assertNotIn('fail', executor.tasks)

        self.assertEqual(executor.queued_tasks, {})
        self.assertEqual(timedelta(0, 600), executor.task_adoption_timeout)
Example #11
 def setUpClass(cls):
     """Start a celery worker"""
     super().setUpClass()
     # Special namespace loading of methods needed by start_worker, per the celery docs
     app.loader.import_module("celery.contrib.testing.tasks")
     cls.clear_worker()
     cls.celery_worker = start_worker(app, concurrency=1)
     cls.celery_worker.__enter__()
Example #12
    def setUpClass(cls) -> None:
        super().setUpClass()

        # https://stackoverflow.com/questions/46530784/make-django-test-case-database-visible-to-celery/46564964#46564964
        cls.celery_worker = start_worker(app, perform_ping_check=False)
        cls.celery_worker.__enter__()

        # show warnings and above during testing
        logging.getLogger().setLevel(logging.WARNING)
Example #13
def swh_scheduler_celery_worker(
    swh_scheduler_celery_app,
    swh_scheduler_celery_includes,
):
    """Spawn a worker"""
    for module in swh_scheduler_celery_includes:
        swh_scheduler_celery_app.loader.import_task_module(module)
    # The "solo" pool executes tasks in the worker thread itself, keeping
    # them in the same process as the test.
    with worker.start_worker(swh_scheduler_celery_app, pool="solo") as w:
        yield w
Example #14
def application_worker(request, application, celery_includes,
                       celery_worker_pool, celery_worker_parameters):
    # type: (Any, Celery, Sequence[str], str) -> WorkController
    """Fixture: Start worker in a thread, stop it when the test returns."""
    for module in celery_includes:
        application.loader.import_task_module(module)
    with worker.start_worker(application,
                             pool=celery_worker_pool,
                             **celery_worker_parameters) as w:
        yield w
Example #15
 def setUpClass(cls):
     logging.disable(logging.WARNING)
     super().setUpClass()
     # route tasks to this worker by using the default 'celery' queue
     # that is exclusively used for the automated tests
     celery_app.conf.update(task_routes=None)
     cls.celery_worker = start_worker(celery_app,
                                      concurrency=2,
                                      perform_ping_check=False)
     cls.celery_worker.__enter__()
Example #16
    def setUpClass(cls):
        super().setUpClass()

        # import the ping task, because the start worker function needs it
        app.loader.import_module("celery.contrib.testing.tasks")
        # Start up celery worker
        cls.celery_worker = start_worker(app)
        cls.celery_worker.__enter__()

        cls.setupTestData()
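Several snippets here import `celery.contrib.testing.tasks` before starting the worker. That module provides the `ping` task that `start_worker` runs by default to confirm the worker is up; the alternative, used in other examples on this page, is to disable the check. Both options side by side, assuming `app` is the Celery app under test and `worker_ctx` is just a placeholder name:

from celery.contrib.testing.worker import start_worker

# Option 1: make the ping task importable so the readiness check can pass.
app.loader.import_module('celery.contrib.testing.tasks')
worker_ctx = start_worker(app)

# Option 2: skip the readiness check entirely.
worker_ctx = start_worker(app, perform_ping_check=False)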
Example #17
    def test_celery_integration(self, broker_url):
        with self._prepare_app(broker_url) as app:
            executor = celery_executor.CeleryExecutor()
            executor.start()

            with start_worker(app=app, logfile=sys.stdout, loglevel='info'):
                success_command = ['true', 'some_parameter']
                fail_command = ['false', 'some_parameter']
                execute_date = datetime.datetime.now()

                cached_celery_backend = celery_executor.execute_command.backend
                task_tuples_to_send = [
                    (('success', 'fake_simple_ti', execute_date, 0),
                     None, success_command, celery_executor.celery_configuration['task_default_queue'],
                     celery_executor.execute_command),
                    (('fail', 'fake_simple_ti', execute_date, 0),
                     None, fail_command, celery_executor.celery_configuration['task_default_queue'],
                     celery_executor.execute_command)
                ]

                chunksize = executor._num_tasks_per_send_process(len(task_tuples_to_send))
                num_processes = min(len(task_tuples_to_send), executor._sync_parallelism)

                send_pool = Pool(processes=num_processes)
                key_and_async_results = send_pool.map(
                    celery_executor.send_task_to_executor,
                    task_tuples_to_send,
                    chunksize=chunksize)

                send_pool.close()
                send_pool.join()

                for task_instance_key, _, result in key_and_async_results:
                    # Only pops when enqueued successfully, otherwise keep it
                    # and expect scheduler loop to deal with it.
                    result.backend = cached_celery_backend
                    executor.running.add(task_instance_key)
                    executor.tasks[task_instance_key] = result
                    executor.last_state[task_instance_key] = celery_states.PENDING

                executor.running.add(('success', 'fake_simple_ti', execute_date, 0))
                executor.running.add(('fail', 'fake_simple_ti', execute_date, 0))

                executor.end(synchronous=True)

        self.assertEqual(executor.event_buffer[('success', 'fake_simple_ti', execute_date, 0)], State.SUCCESS)
        self.assertEqual(executor.event_buffer[('fail', 'fake_simple_ti', execute_date, 0)], State.FAILED)

        self.assertNotIn('success', executor.tasks)
        self.assertNotIn('fail', executor.tasks)

        self.assertNotIn('success', executor.last_state)
        self.assertNotIn('fail', executor.last_state)
Example #18
    def setUpClass(cls):
        super().setUpClass()
        # Instantiate the browser object
        profile = cls._create_firefox_profile()
        cls.browser = Browser("remote",
                              command_executor=SELENIUM_URL,
                              browser_profile=profile)

        if cls.requires_celery:
            app.loader.import_module("celery.contrib.testing.tasks")
            cls.clear_worker()
            cls.celery_worker = start_worker(app, concurrency=1)
            cls.celery_worker.__enter__()
Example #19
def celery_test_fixture(user_object):
    """
    This Fixture starts a celery worker in the running test container and exits it.
    With this manual entering and exiting it is possible to use the time set by freezgun and also
    use the Testdatabase.
    :param user_object:
    :param contract_ending_in_february:
    :return:
    """
    worker = start_worker(app)
    worker.__enter__()
    yield
    worker.__exit__(None, None, None)
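Since `start_worker` is a context manager, the manual `__enter__`/`__exit__` pair around the `yield` is equivalent to a `with` block, which additionally guarantees the worker is stopped if the test body raises. A sketch of the same fixture in that form, assuming the same module-level `app`:

import pytest
from celery.contrib.testing.worker import start_worker


@pytest.fixture
def celery_test_fixture(user_object):
    # Equivalent to the manual __enter__/__exit__ pair above, but the
    # worker is also torn down when the test body raises.
    with start_worker(app):
        yield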
Example #20
    def setUp(self):
        celery_app = TestApp(set_as_current=False,
                             enable_logging=True,
                             config=self['celery_config'],
                             **self['celery_parameters'])
        self['celery_app_fixture'] = setup_default_app(celery_app)
        self['celery_app_fixture'].__enter__()

        for module in self['celery_includes']:
            celery_app.loader.import_task_module(module)

        self['celery_worker_fixture'] = start_worker(
            celery_app, pool='prefork', **self['celery_worker_parameters'])
        self['celery_worker_fixture'].__enter__()
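This `setUp` enters two context managers (the default-app fixture and the worker); the matching `tearDown` is not shown, but presumably exits them in reverse order. A minimal sketch:

def tearDown(self):
    # Exit in reverse order of entry: stop the worker first, then
    # restore the previously configured default app.
    self['celery_worker_fixture'].__exit__(None, None, None)
    self['celery_app_fixture'].__exit__(None, None, None)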
Example #21
    def test_celery_integration(self, broker_url):
        with self._prepare_app(broker_url) as app:
            executor = celery_executor.CeleryExecutor()
            executor.start()

            with start_worker(app=app, logfile=sys.stdout, loglevel='debug'):
                success_command = ['true', 'some_parameter']
                fail_command = ['false', 'some_parameter']

                cached_celery_backend = celery_executor.execute_command.backend
                task_tuples_to_send = [('success', 'fake_simple_ti', success_command,
                                        celery_executor.celery_configuration['task_default_queue'],
                                        celery_executor.execute_command),
                                       ('fail', 'fake_simple_ti', fail_command,
                                        celery_executor.celery_configuration['task_default_queue'],
                                        celery_executor.execute_command)]

                chunksize = executor._num_tasks_per_send_process(len(task_tuples_to_send))
                num_processes = min(len(task_tuples_to_send), executor._sync_parallelism)

                send_pool = Pool(processes=num_processes)
                key_and_async_results = send_pool.map(
                    celery_executor.send_task_to_executor,
                    task_tuples_to_send,
                    chunksize=chunksize)

                send_pool.close()
                send_pool.join()

                for key, command, result in key_and_async_results:
                    # Only pops when enqueued successfully, otherwise keep it
                    # and expect scheduler loop to deal with it.
                    result.backend = cached_celery_backend
                    executor.running[key] = command
                    executor.tasks[key] = result
                    executor.last_state[key] = celery_states.PENDING

                executor.running['success'] = True
                executor.running['fail'] = True

                executor.end(synchronous=True)

        self.assertEqual(executor.event_buffer['success'], State.SUCCESS)
        self.assertEqual(executor.event_buffer['fail'], State.FAILED)

        self.assertNotIn('success', executor.tasks)
        self.assertNotIn('fail', executor.tasks)

        self.assertNotIn('success', executor.last_state)
        self.assertNotIn('fail', executor.last_state)
Example #22
def celery_worker(request,
                  celery_app,
                  celery_includes,
                  celery_worker_pool,
                  celery_worker_parameters):
    # type: (Any, Celery, Sequence[str], str) -> WorkController
    """Fixture: Start worker in a thread, stop it when the test returns."""
    if not NO_WORKER:
        for module in celery_includes:
            celery_app.loader.import_task_module(module)
        with worker.start_worker(celery_app,
                                 pool=celery_worker_pool,
                                 **celery_worker_parameters) as w:
            yield w
Example #23
def celery_session_worker(request,
                          celery_session_app,
                          celery_includes,
                          celery_worker_pool,
                          celery_worker_parameters):
    # type: (Any, Celery, Sequence[str], str) -> WorkController
    """Session Fixture: Start worker that lives throughout test suite."""
    if not NO_WORKER:
        for module in celery_includes:
            celery_session_app.loader.import_task_module(module)
        with worker.start_worker(celery_session_app,
                                 pool=celery_worker_pool,
                                 **celery_worker_parameters) as w:
            yield w
Example #24
def test_worker_status(exporter, celery_app):
    threading.Thread(target=exporter.run, args=(exporter.cfg, ),
                     daemon=True).start()
    time.sleep(5)

    with start_worker(celery_app, without_heartbeat=False) as celery_worker:
        hostname = celery_worker.hostname
        time.sleep(2)
        assert (exporter.registry.get_sample_value(
            "celery_worker_up", labels={"hostname": hostname}) == 1.0)

    time.sleep(2)
    assert (exporter.registry.get_sample_value("celery_worker_up",
                                               labels={"hostname":
                                                       hostname}) == 0.0)
Example #25
def celery_test_fixture_end_of_year_test(december_contract):
    """
    This fixture creates a Contract which starts on 1.1.2019 and ends at 29.2.2020.
    The expected output is as in every other month: -1200 minutes on the automatically created Report at the beginning
    of a month.

    :param user_object:
    :param contract_ending_in_february:
    :return:
    """

    worker = start_worker(app)
    worker.__enter__()
    yield
    worker.__exit__(None, None, None)
Example #26
 def setUpClass(cls):
     super().setUpClass()
     app.loader.import_module('celery.contrib.testing.tasks')
     # Start up celery worker
     cls.celery_worker = start_worker(app)
     cls.celery_worker.__enter__()
     # create a dummy job in the in-memory database
     job = Job(
         name="test job",
         status="started",
     )
     job.save()
     cls.job_id = job.id
     # call command for populating database
     call_command('populateHierarchies', verbosity=0)
Example #27
    def setUpClass(cls):
        super().setUpClass()
        # Instantiate the browser object.
        profile = cls._create_firefox_profile()
        cls.browser = Browser(
            "remote",
            command_executor=SELENIUM_URL,
            browser_profile=profile,
            # See: https://developer.mozilla.org/en-US/docs/Web/WebDriver/Timeouts
            # desired_capabilities={"timeouts": {"implicit": 60 * 60 * 1000 }},  # 1 hour timeout
        )

        if cls.requires_celery:
            app.loader.import_module("celery.contrib.testing.tasks")
            cls.clear_worker()
            cls.celery_worker = start_worker(app, concurrency=1)
            cls.celery_worker.__enter__()
Example #28
 def setUp(self):
     if not DEBUG:
         # headless chrome driver
         options = webdriver.ChromeOptions()
         options.add_argument('headless')
         options.add_argument('window-size=1280x800')
         self.driver = webdriver.Chrome(chrome_options=options)
     else:
         # use with header
         self.driver = webdriver.Chrome()
         # Or use different webdriver
         # self.driver = webdriver.PhantomJS()
         # self.driver = webdriver.Firefox()
         self.driver.set_window_size(1280, 800)
     # Start up celery worker for testing
     self.celery_worker = start_worker(app)
     self.celery_worker.__enter__()
Example #29
def celery_test_fixture_correct_minutes(user_object,
                                        contract_ending_in_february):
    """
    This fixture modifies the automatically created report for January to have symbolicaly 600 minutes of work
    documented. It is needed to test whether the automatic Report creation carries over the minutes of
    the last month.
    :param user_object:
    :param contract_ending_in_february:
    :return:
    """
    january_report = contract_ending_in_february.reports.get(
        month_year__month=1)
    january_report.worktime = datetime.timedelta(minutes=600)
    january_report.save()
    worker = start_worker(app)
    worker.__enter__()
    yield
    worker.__exit__(None, None, None)
Example #30
 def setUpClass(self):
     super(QueryTestCase, self).setUpClass()
     # Start up celery worker for testing
     self.celery_worker = start_worker(app)
     self.celery_worker.__enter__()
     Organism.objects.create(display_name=display_name,
                             short_name=short_name,
                             tax_id=tax_id)
     organism = Organism.objects.get(short_name=short_name)
     sequence = SequenceType.objects.create(molecule_type='prot',
                                            dataset_type=dataset_type)
     prepare_test_fasta_file()
     self.files = test_files
     BlastDb.objects.create(
         fasta_file=FileObject('/blast/db/clec_peptide_example_BLASTdb.fa'),
         organism=organism,
         type=sequence,
         is_shown=False,
         title=title)
     organism = Organism.objects.get(short_name=short_name)
     blastdb = BlastDb.objects.get(organism=organism)
     returncode, error, output = blastdb.makeblastdb()
     returncode, error, output = blastdb.index_fasta()
     blastdb.is_shown = True
     blastdb.save()
     if not DEBUG:
         # headless chrome driver
         options = webdriver.ChromeOptions()
         options.add_argument('headless')
         options.add_argument('window-size=1280x800')
         self.driver = webdriver.Chrome(chrome_options=options)
     else:
         # use with header
         self.driver = webdriver.Chrome()
         # Or use different webdriver
         # self.driver = webdriver.PhantomJS()
         # self.driver = webdriver.Firefox()
         self.driver.set_window_size(1280, 800)
Example #31
 def setUp(self):
     User = get_user_model()
     self.username = '******'
     self.password = '******'
     User.objects.create_superuser(
         username=self.username,
         password=self.password,
         email='*****@*****.**')
     if not DEBUG:
         # headless chrome driver
         options = webdriver.ChromeOptions()
         options.add_argument('headless')
         options.add_argument('window-size=1280x800')
         self.driver = webdriver.Chrome(chrome_options=options)
     else:
         # use with header
         self.driver = webdriver.Chrome()
         # Or use different webdriver
         # self.driver = webdriver.PhantomJS()
         # self.driver = webdriver.Firefox()
         self.driver.set_window_size(1280, 800)
     # Start up celery worker for testing
     self.celery_worker = start_worker(app)
     self.celery_worker.__enter__()
Example #32
 def setUpClass(cls):
     super().setUpClass()
     app = Celery('openwisp2')
     app.config_from_object('django.conf:settings', namespace='CELERY')
     app.autodiscover_tasks()
     cls.celery_worker = start_worker(app)
     cls.celery_worker.__enter__()