Example #1
def test_func__reset_password__ok__nominal_case(self):
    uapi = UserApi(
        current_user=None,
        session=self.session,
        config=self.app_config,
    )
    current_user = uapi.get_one_by_email('*****@*****.**')
    uapi.reset_password_notification(current_user, do_save=True)
    transaction.commit()
    # Send mail async from redis queue
    redis = get_redis_connection(self.app_config)
    queue = get_rq_queue(
        redis,
        'mail_sender',
    )
    worker = SimpleWorker([queue], connection=queue.connection)
    worker.work(burst=True)
    # check mail received
    response = requests.get('http://127.0.0.1:8025/api/v1/messages')
    response = response.json()
    headers = response[0]['Content']['Headers']
    assert headers['From'][0] == 'Tracim Notifications <test_user_from+0@localhost>'  # nopep8
    assert headers['To'][0] == 'Global manager <*****@*****.**>'
    assert headers['Subject'][0] == '[TRACIM] Reset Password Request'
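Most of the examples on this page share one testing pattern: enqueue jobs, then drain the queue inside the test process with SimpleWorker(...).work(burst=True). Below is a minimal, self-contained sketch of that pattern; it assumes fakeredis is installed so no Redis server is needed, and it defines the job at module level because rq refuses to enqueue functions it cannot re-import.

# test_rq_burst.py -- minimal sketch of the burst-worker testing pattern
from fakeredis import FakeStrictRedis
from rq import Queue, SimpleWorker


def add(x, y):
    return x + y


def test_burst_worker_runs_enqueued_job():
    queue = Queue(connection=FakeStrictRedis())
    job = queue.enqueue(add, 2, 3)

    # SimpleWorker executes jobs in the current process (no fork());
    # burst=True makes work() return once the queue is empty.
    worker = SimpleWorker([queue], connection=queue.connection)
    worker.work(burst=True)

    assert job.get_status() == 'finished'
    assert job.result == 5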
Example #2
def test_coverage_summary_by_changeset(coverage_builds):
    from rq import Queue
    from codecoverage_backend import api
    from tests.conftest import mock_coverage_by_changeset_job_success

    # patch the queue to use a fake Redis so jobs can be run without external workers. http://python-rq.org/docs/testing/
    with mock.patch('codecoverage_backend.api.q',
                    Queue(connection=FakeStrictRedis())) as q:
        # patch coverage_by_changeset_job with the success mock
        with mock.patch('codecoverage_backend.api.coverage_by_changeset_job',
                        mock_coverage_by_changeset_job_success):
            # Get changeset coverage information
            for changeset, expected in coverage_builds['summary'].items():
                result, code = api.coverage_summary_by_changeset(changeset)
                assert code == 202

            # the invalid changeset is also accepted and queued for now; it
            # only returns 500 once the worker has run (checked below)
            result, code = api.coverage_summary_by_changeset(
                'mozilla test changeset')
            assert code == 202

            # run simple worker to run all tasks
            w = SimpleWorker([q], connection=q.connection)
            w.work(burst=True)

            # Everything should be 200 now
            for changeset, expected in coverage_builds['summary'].items():
                result, code = api.coverage_summary_by_changeset(changeset)
                assert result == expected
                assert code == 200

            # except for the incorrect changeset, which should be 500
            result, code = api.coverage_summary_by_changeset(
                'mozilla test changeset')
            assert code == 500
Example #3
    def test_func__create_user_with_mail_notification__ok__nominal_case(
            self, mailhog, user_api_factory, app_config):
        api = user_api_factory.get(current_user=None)
        u = api.create_user(
            email="bob@bob",
            password="******",
            name="bob",
            timezone="+2",
            do_save=True,
            do_notify=True,
        )
        assert u is not None
        assert u.email == "bob@bob"
        assert u.validate_password("password")
        assert u.display_name == "bob"
        assert u.timezone == "+2"

        # Send mail async from redis queue
        redis = get_redis_connection(app_config)
        queue = get_rq_queue(redis, "mail_sender")
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)
        # check mail received
        response = mailhog.get_mailhog_mails()
        headers = response[0]["Content"]["Headers"]
        assert headers["From"][
            0] == "Global manager via Tracim <test_user_from+1@localhost>"
        assert headers["To"][0] == "bob <bob@bob>"
        assert headers["Subject"][0] == "[Tracim] Created account"
Example #4
def test_create_upload_delete_compute(
    client, nocommit_transaction, new_job, weather_df, async_queue, mocker, auth0_id
):
    cr = client.post("/jobs/", data=new_job.json())
    assert cr.status_code == 201
    new_id = cr.json()["object_id"]
    response = client.get(f"/jobs/{new_id}")
    assert response.status_code == 200
    stored_job = response.json()
    assert len(stored_job["data_objects"]) == 1
    data_id = stored_job["data_objects"][0]["object_id"]
    iob = BytesIO()
    weather_df.to_feather(iob)
    iob.seek(0)
    response = client.post(
        f"/jobs/{new_id}/data/{data_id}",
        files={"file": ("test.arrow", iob, "application/vnd.apache.arrow.file")},
    )
    assert response.status_code == 200
    response = client.get(f"/jobs/{new_id}/status")
    assert response.json()["status"] == "prepared"
    response = client.post(f"/jobs/{new_id}/compute")
    assert response.status_code == 202
    response = client.get(f"/jobs/{new_id}/status")
    assert response.json()["status"] == "queued"
    with storage.StorageInterface(user=auth0_id).start_transaction() as st:
        st.delete_job(new_id)

    ww = SimpleWorker([async_queue], connection=async_queue.connection)
    log = mocker.spy(ww, "log")
    ww.work(burst=True)
    # the worker logs an error when a job raises; no error is expected here
    assert log.error.call_count == 0
Example #5
    def test_func__create_user_with_mail_notification__ok__nominal_case(self):
        api = UserApi(
            current_user=None,
            session=self.session,
            config=self.app_config,
        )
        u = api.create_user(
            email='bob@bob',
            password='******',
            name='bob',
            timezone='+2',
            do_save=True,
            do_notify=True,
        )
        assert u is not None
        assert u.email == "bob@bob"
        assert u.validate_password('password')
        assert u.display_name == 'bob'
        assert u.timezone == '+2'

        # Send mail async from redis queue
        redis = get_redis_connection(self.app_config)
        queue = get_rq_queue(
            redis,
            'mail_sender',
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)
        # check mail received
        response = self.get_mailhog_mails()
        headers = response[0]['Content']['Headers']
        assert headers['From'][0] == 'Tracim Notifications <test_user_from+0@localhost>'  # nopep8
        assert headers['To'][0] == 'bob <bob@bob>'
        assert headers['Subject'][0] == '[TRACIM] Created account'
Example #6
def test_sync_jobs(mocker):
    qm = queuing.QueueManager()
    qm.job_func = run
    job_status = {
        "0": "queued",
        "1": "queued",
        "3": "queued",
    }
    qm.q.enqueue(fail, ValueError, "0 isnt 1", job_id="0")
    w = SimpleWorker([qm.q], connection=qm.redis_conn)
    w.work(burst=True)
    assert len(qm.q.failed_job_registry) == 1

    queued = {str(i): "user" for i in range(5)}
    qm.enqueue_job("4", "user")
    assert qm.q.job_ids == ["4"]

    mocker.patch("solarperformanceinsight_api.queuing.time.sleep",
                 side_effect=KeyboardInterrupt)

    jmi = mocker.MagicMock()
    startt = jmi.start_transaction.return_value.__enter__.return_value
    startt.list_queued_jobs.return_value = queued
    startt.list_status_of_jobs.return_value = job_status
    mocker.patch(
        "solarperformanceinsight_api.queuing._get_job_management_interface",
        return_value=jmi,
    )
    queuing.sync_jobs()
    # 0 failed, 2 is missing
    assert set(qm.q.job_ids) == {"1", "3"}
    assert startt.report_job_failure.call_count == 1
Example #7
def test_monitor_job_with_retry2(client):
    """Test monitoring a job for a task that fails stops after max retries"""
    with client.application.app_context():
        app = client.application
        app.redis.flushall()

        task_id = str(uuid4())
        t = Task.create_task(task_id)
        j = t.create_job()
        job_id = j.job_id
        j.metadata["retries"] = 3
        j.metadata["retry_count"] = 3

        ex = j.create_execution("image", "command")

        j.save()

        exec_mock = MagicMock()
        exec_mock.get_result.return_value = MagicMock(
            exit_code=1, log="".encode("utf-8"), error="error".encode("utf-8")
        )
        client.application.executor = exec_mock

        queue = Queue("monitor", is_async=False, connection=client.application.redis)
        result = queue.enqueue(job_mod.monitor_job, t.task_id, job_id, ex.execution_id)

        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        t.reload()
        expect(t.jobs).to_length(1)

        job = t.jobs[0]
        expect(job.executions).to_length(1)

        execution = job.executions[0]
        expect(execution.image).to_equal("image")
        expect(execution.command).to_equal("command")

        hash_key = f"rq:job:{result.id}"

        res = app.redis.exists(hash_key)
        expect(res).to_be_true()

        res = app.redis.hget(hash_key, "status")
        expect(res).to_equal("finished")

        res = app.redis.hexists(hash_key, "data")
        expect(res).to_be_true()

        keys = app.redis.keys()
        next_job_id = [
            key
            for key in keys
            if key.decode("utf-8").startswith("rq:job")
            and not key.decode("utf-8").endswith(result.id)
        ]
        expect(next_job_id).to_length(0)
Example #8
def test_func__reset_password__ok__nominal_case(self):
    uapi = UserApi(
        current_user=None,
        session=self.session,
        config=self.app_config,
    )
    current_user = uapi.get_one_by_email('*****@*****.**')
    uapi.reset_password_notification(current_user, do_save=True)
    transaction.commit()
    # Send mail async from redis queue
    redis = get_redis_connection(
        self.app_config
    )
    queue = get_rq_queue(
        redis,
        'mail_sender',
    )
    worker = SimpleWorker([queue], connection=queue.connection)
    worker.work(burst=True)
    # check mail received
    response = self.get_mailhog_mails()
    headers = response[0]['Content']['Headers']
    assert headers['From'][0] == 'Tracim Notifications <test_user_from+0@localhost>'  # nopep8
    assert headers['To'][0] == 'Global manager <*****@*****.**>'
    assert headers['Subject'][0] == '[TRACIM] A password reset has been requested'
Example #9
    def setUp(self):
        views.db.init_app(self.app)
        with self.app.test_request_context():
            views.db.create_all()

        self.queue = Queue(is_async=False, connection=fakeredis.FakeStrictRedis())  # 'async' became a reserved word; rq renamed the kwarg to is_async
        self.worker = SimpleWorker([self.queue], connection=self.queue.connection)
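The rq testing docs (linked in Example #2) also describe the approach used in the setUp above: constructing the queue with is_async=False makes enqueue() run the job immediately in the calling process, so no worker is needed at all. A short sketch under the same fakeredis assumption:

from fakeredis import FakeStrictRedis
from rq import Queue


def square(x):
    return x * x


def test_sync_queue_runs_job_inline():
    # is_async=False performs the job inside enqueue() itself
    queue = Queue(is_async=False, connection=FakeStrictRedis())
    job = queue.enqueue(square, 4)
    assert job.is_finished
    assert job.result == 16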
Example #10
def test_coverage_summary_by_changeset(coverage_builds):
    from rq import Queue
    from codecoverage_backend import api
    from tests.conftest import mock_coverage_by_changeset_job_success

    # patch the queue to use a fake Redis so jobs can be run without external workers. http://python-rq.org/docs/testing/
    with mock.patch('codecoverage_backend.api.q', Queue(connection=FakeStrictRedis())) as q:
        # patch coverage_by_changeset_job with the success mock
        with mock.patch('codecoverage_backend.api.coverage_by_changeset_job', mock_coverage_by_changeset_job_success):
            # Get changeset coverage information
            for changeset, expected in coverage_builds['summary'].items():
                result, code = api.coverage_summary_by_changeset(changeset)
                assert code == 202

            # the invalid changeset is also accepted and queued for now; it
            # only returns 500 once the worker has run (checked below)
            result, code = api.coverage_summary_by_changeset('mozilla test changeset')
            assert code == 202

            # run simple worker to run all tasks
            w = SimpleWorker([q], connection=q.connection)
            w.work(burst=True)

            # Everything should be 200 now
            for changeset, expected in coverage_builds['summary'].items():
                result, code = api.coverage_summary_by_changeset(changeset)
                assert result == expected
                assert code == 200

            # except for the incorrect changeset, which should be 500
            result, code = api.coverage_summary_by_changeset('mozilla test changeset')
            assert code == 500
Example #11
    def test_job_retried_correctly(self):
        """To check if the job was correctly failed etc., we should use
        a Worker and not the queue provided with FakeRedisTestCaseMixin."""
        # Create an asynchronous queue.
        # The name `separate_queue` used here is to ensure the queue isn't used
        # anywhere else.
        queue = Queue('separate_queue', connection=self.connection)
        worker = SimpleWorker([queue], connection=queue.connection)

        # log admin user
        self._logSuperuserIn()

        # this job will fail
        job = queue.enqueue(dummy_fail_job)
        self.assertEqual(job.get_status(), 'queued')

        # log the job in our system as RQJob
        rqjob = RQJob.objects.create(job_id=job.id, trigger=self.trigger)

        # run the worker
        worker.work(burst=True)
        self.assertEqual(job.get_status(), 'failed')

        url = reverse('admin:autoemails_rqjob_retry', args=[rqjob.pk])
        rv = self.client.post(url, follow=True)
        self.assertIn(
            f'The job {job.id} was requeued. It will be run shortly.',
            rv.content.decode('utf-8'),
        )

        self.assertEqual(job.get_status(refresh=True), 'queued')
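The admin "retry" view exercised above ultimately requeues the job out of rq's FailedJobRegistry. A hedged sketch of the same requeue done directly against the registry, with a stand-in dummy_fail_job (any module-level function that raises will do):

from fakeredis import FakeStrictRedis
from rq import Queue, SimpleWorker
from rq.registry import FailedJobRegistry


def dummy_fail_job():
    raise RuntimeError('failing on purpose')


def test_requeue_failed_job_via_registry():
    queue = Queue('separate_queue', connection=FakeStrictRedis())
    job = queue.enqueue(dummy_fail_job)

    SimpleWorker([queue], connection=queue.connection).work(burst=True)

    # the failed job is tracked in the queue's FailedJobRegistry
    registry = FailedJobRegistry(queue=queue)
    assert job.id in registry.get_job_ids()

    # requeue() removes it from the registry and puts it back on the queue
    registry.requeue(job)
    assert job.get_status(refresh=True) == 'queued'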
Example #12
class CounterTest(unittest.TestCase):
    def setUp(self):
        app.config['TESTING'] = True
        self.app = app.test_client()
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
        # self.app.config()
        self.worker = SimpleWorker([queue], connection=queue.connection)

        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.worker.work(burst=True)

    def test_flask_application_is_up_and_running(self):
        response = self.app.get('/')
        self.assertEqual(response.status_code, 200)

    def test_redis_server_is_up_and_running(self):
        response = r.ping()
        self.assertEqual(response, True)

    def test_redis_worker(self):
        self.assertEqual(self.worker.get_state(), 'starting')

    def test_can_create_job(self):
        job = queue.enqueue(count_words_at_url, 'https://www.linkedin.com/')
        self.assertEqual(job.is_queued, True)

    def test_successful_result(self):
        url = 'https://raghuvar.github.io/'
        instance_id = count_words_at_url(url)
        instance = Counter.query.filter_by(url=url).first()
        self.assertEqual(instance.word_count, 2214)
Example #13
    def setUp(self):
        app.config['TESTING'] = True
        self.app = app.test_client()
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
        # self.app.config()
        self.worker = SimpleWorker([queue], connection=queue.connection)

        db.create_all()
Example #14
    def test_func__create_new_content_with_notification__ok__nominal_case(
            self):
        uapi = UserApi(
            current_user=None,
            session=self.session,
            config=self.app_config,
        )
        current_user = uapi.get_one_by_email('*****@*****.**')
        # Create new user with notification enabled on w1 workspace
        wapi = WorkspaceApi(
            current_user=current_user,
            session=self.session,
            config=self.app_config,
        )
        workspace = wapi.get_one_by_label('Recipes')
        user = uapi.get_one_by_email('*****@*****.**')
        wapi.enable_notifications(user, workspace)

        api = ContentApi(
            current_user=user,
            session=self.session,
            config=self.app_config,
        )
        item = api.create(
            content_type_list.Folder.slug,
            workspace,
            None,
            'parent',
            do_save=True,
            do_notify=False,
        )
        item2 = api.create(
            content_type_list.File.slug,
            workspace,
            item,
            'file1',
            do_save=True,
            do_notify=True,
        )
        # Send mail async from redis queue
        redis = get_redis_connection(self.app_config)
        queue = get_rq_queue(
            redis,
            'mail_sender',
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)
        # check mail received
        response = requests.get('http://127.0.0.1:8025/api/v1/messages')
        response = response.json()
        headers = response[0]['Content']['Headers']
        assert headers['From'][0] == '"Bob i. via Tracim" <test_user_from+3@localhost>'  # nopep8
        assert headers['To'][0] == 'Global manager <*****@*****.**>'
        assert headers['Subject'][0] == '[TRACIM] [Recipes] file1 (Open)'
        assert headers['References'][0] == 'test_user_refs+22@localhost'
        assert headers['Reply-to'][0] == '"Bob i. & all members of Recipes" <test_user_reply+22@localhost>'  # nopep8
Example #15
    def test_func__create_new_content_with_notification__ok__nominal_case(self):
        uapi = UserApi(
            current_user=None,
            session=self.session,
            config=self.app_config,
        )
        current_user = uapi.get_one_by_email('*****@*****.**')
        # Create new user with notification enabled on w1 workspace
        wapi = WorkspaceApi(
            current_user=current_user,
            session=self.session,
            config=self.app_config,
        )
        workspace = wapi.get_one_by_label('Recipes')
        user = uapi.get_one_by_email('*****@*****.**')
        wapi.enable_notifications(user, workspace)

        api = ContentApi(
            current_user=user,
            session=self.session,
            config=self.app_config,
        )
        item = api.create(
            content_type_list.Folder.slug,
            workspace,
            None,
            'parent',
            do_save=True,
            do_notify=False,
        )
        item2 = api.create(
            content_type_list.File.slug,
            workspace,
            item,
            'file1',
            do_save=True,
            do_notify=True,
        )
        # Send mail async from redis queue
        redis = get_redis_connection(
            self.app_config
        )
        queue = get_rq_queue(
            redis,
            'mail_sender',
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)
        # check mail received
        response = self.get_mailhog_mails()
        headers = response[0]['Content']['Headers']
        assert headers['From'][0] == '"Bob i. via Tracim" <test_user_from+3@localhost>'  # nopep8
        assert headers['To'][0] == 'Global manager <*****@*****.**>'
        assert headers['Subject'][0] == '[TRACIM] [Recipes] file1 (Open)'
        assert headers['References'][0] == 'test_user_refs+22@localhost'
        assert headers['Reply-to'][0] == '"Bob i. & all members of Recipes" <test_user_reply+22@localhost>'  # nopep8
Example #16
    def test_simpleworker_heartbeat_ttl(self):
        """SimpleWorker's key must last longer than job.timeout when working"""
        queue = Queue('foo')

        worker = SimpleWorker([queue])
        job_timeout = 300
        job = queue.enqueue(save_key_ttl, worker.key, job_timeout=job_timeout)
        worker.work(burst=True)
        job.refresh()
        self.assertGreater(job.meta['ttl'], job_timeout)
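save_key_ttl here is a fixture from rq's own test suite; a plausible sketch of such a helper (it records the remaining TTL of the worker's heartbeat key into the job's meta, which the test then reads back after job.refresh()):

from rq import get_current_job


def save_key_ttl(key):
    # store how long the given Redis key (the worker's heartbeat key)
    # still has to live, so the test can assert on it afterwards
    job = get_current_job()
    job.meta['ttl'] = job.connection.ttl(key)
    job.save_meta()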
Example #18
    def test_work_via_simpleworker(self):
        """Worker processes work, with forking disabled,
        then returns."""
        fooq, barq = Queue("foo"), Queue("bar")
        w = SimpleWorker([fooq, barq])
        self.assertEqual(w.work(burst=True), False, "Did not expect any work on the queue.")

        job = fooq.enqueue(say_pid)
        self.assertEqual(w.work(burst=True), True, "Expected at least some work done.")
        self.assertEqual(job.result, os.getpid(), "PID mismatch, fork() is not supposed to happen here")
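say_pid, used here and again in Examples #22 and #24, is another fixture from rq's test suite; it presumably just reports the process PID so the test can prove no fork() happened:

import os


def say_pid():
    return os.getpid()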
Example #19
def process_worker_jobs():
    # We need to do this while testing to avoid strange errors on Circle.
    #
    # See:
    #
    #   http://python-rq.org/docs/testing/
    #   https://github.com/ui/django-rq/issues/123

    queue = django_rq.get_queue()
    worker = SimpleWorker([queue], connection=queue.connection)
    worker.work(burst=True)
Example #21
def test_create_upload_compute_success(client, nocommit_transaction, new_job,
                                       async_queue, mocker, weather_df):
    new_job.irradiance_type = "standard"
    cr = client.post("/jobs/", data=new_job.json())
    assert cr.status_code == 201
    new_id = cr.json()["object_id"]
    response = client.get(f"/jobs/{new_id}")
    assert response.status_code == 200
    stored_job = response.json()
    assert len(stored_job["data_objects"]) == 1
    data_id = stored_job["data_objects"][0]["object_id"]
    iob = BytesIO()
    weather_df.rename(columns={
        "poa_global": "ghi",
        "poa_diffuse": "dhi",
        "poa_direct": "dni"
    }).to_feather(iob)
    iob.seek(0)
    response = client.post(
        f"/jobs/{new_id}/data/{data_id}",
        files={
            "file": ("test.arrow", iob, "application/vnd.apache.arrow.file")
        },
    )
    assert response.status_code == 200
    response = client.get(f"/jobs/{new_id}/status")
    assert response.json()["status"] == "prepared"
    response = client.post(f"/jobs/{new_id}/compute")
    assert response.status_code == 202
    response = client.get(f"/jobs/{new_id}/status")
    assert response.json()["status"] == "queued"
    w = SimpleWorker([async_queue], connection=async_queue.connection)
    w.work(burst=True)
    response = client.get(f"/jobs/{new_id}/status")
    assert response.json()["status"] == "complete"
    response = client.get(f"/jobs/{new_id}/results")
    rj = response.json()
    # system level weather, one inverter, one array
    sp_type = {(r["definition"]["schema_path"], r["definition"]["type"]):
               r["object_id"]
               for r in rj}
    assert set(sp_type.keys()) == {
        ("/", "monthly summary"),
        ("/", "daytime flag"),
        ("/", "performance data"),
        ("/inverters/0", "performance data"),
        ("/inverters/0/arrays/0", "weather data"),
    }
    rid = sp_type[("/", "monthly summary")]
    response = client.get(f"/jobs/{new_id}/results/{rid}")
    data = response.text
    assert data.split("\n")[0] == (
        "month,total_energy,plane_of_array_insolation,"
        "effective_insolation,average_daytime_cell_temperature")
Example #22
    def test_work_via_simpleworker(self):
        """Worker processes work, with forking disabled,
        then returns."""
        fooq, barq = Queue('foo'), Queue('bar')
        w = SimpleWorker([fooq, barq])
        self.assertEqual(w.work(burst=True), False,
                         'Did not expect any work on the queue.')

        job = fooq.enqueue(say_pid)
        self.assertEqual(w.work(burst=True), True,
                         'Expected at least some work done.')
        self.assertEqual(job.result, os.getpid(),
                         'PID mismatch, fork() is not supposed to happen here')
Example #23
def test_twitter_search_gets_processed():
    """Ensures the data can be loaded from twitter and stored as a raw source

    Run one search query
    Run worker
    Check two tweets in raw source
    Run worker
    Check the pixels have been averaged out
    """

    with Connection(connection=redis_db):
        source_queue = Queue(name='source')
        process_queue = Queue(name='process')

        query = 'Test Query'
        size = 2
        twitter.search(input=query, size=size)

        worker = SimpleWorker([source_queue])
        worker.work(burst=True)

        assert len(Tweet.keys()) == size

        worker = SimpleWorker([process_queue])
        worker.work(burst=True)
        #
        # assert len(Tweet.keys()) == size
Example #24
    def test_work_via_simpleworker(self):
        """Worker processes work, with forking disabled,
        then returns."""
        fooq = Queue('foo', connection=self.conn)
        barq = Queue('bar', connection=self.conn)
        w = SimpleWorker([fooq, barq], connection=self.conn)
        self.assertEqual(w.work(burst=True), False,
                         'Did not expect any work on the queue.')

        job = fooq.enqueue(say_pid)
        self.assertEqual(w.work(burst=True), True,
                         'Expected at least some work done.')
        self.assertEqual(job.result, os.getpid(),
                         'PID mismatch, fork() is not supposed to happen here')
Example #25
class TestPostman(TestCase):

    def setUp(self):
        redis_conn = FakeStrictRedis()
        # create queues
        main_queue = Queue("test_medium", connection=redis_conn,
                                failed_ttl=DELETE_FAILED_TIMEOUT,
                                default_timeout=DELETE_FINISHED_TIMEOUT)
        scheduled_queue = Queue("test_scheduled", connection=redis_conn,
                                    failed_ttl=DELETE_FAILED_TIMEOUT,
                                    default_timeout=DELETE_FINISHED_TIMEOUT)
        self.worker = SimpleWorker([main_queue, scheduled_queue],
                               connection=main_queue.connection)

        self.postman = Postman({"viber": Viber(), "whatsapp": WhatsApp(), "telegram": Telegram()})
        self.postman.redis_conn = redis_conn
        self.postman.scheduled_queue = scheduled_queue
        self.postman.main_queue = main_queue

    def tearDown(self):
        self.postman.main_queue.empty()
        self.postman.main_queue.delete(delete_jobs=True)
        self.postman.scheduled_queue.empty()
        self.postman.scheduled_queue.delete(delete_jobs=True)

    def test_send_message_with_failure(self):
        with self.assertRaises(ValueError):
            self.postman.send_message(wrong_messages[1])

        job = self.postman.main_queue.enqueue(fail_job)
        self.worker.work(burst=True)
        self.assertEqual(len(self.postman.main_queue.failed_job_registry.get_job_ids()), 1)
        self.assertEqual(self.postman.main_queue.failed_job_registry.get_job_ids()[0], job.id)

    def test_send_scheduled_message(self):
        result = self.postman.send_message(valide_message_for_postman)
        self.assertEqual(len(self.postman.scheduled_queue.scheduled_job_registry.get_job_ids()), 2)
        self.assertIsInstance(result, dict)
        self.assertEqual(len(result["scheduled"]), 2)
        self.assertListEqual(result["scheduled"], ['Message to user: 1 with body: Hello', 'Message to user: 2 with body: Hello'])

    def test_send_message_with_success(self):
        result = self.postman.send_message(valide_message_for_success_postman)
        self.assertListEqual(result["started"], ['Message to user: 1 with body: Hello', 'Message to user: 2 with body: Hello'])

    def test_get_scheduled(self):
        result = self.postman.send_message(valide_message_for_postman)
        scheduled = self.postman.get_scheduled()
        self.assertEqual(sorted(result["scheduled"]), sorted(scheduled["scheduled"]))
Example #26
    def process_jobs(self):
        """
        Run background tasks.
        """

        # create a request context
        with self.app.test_request_context('/'):
            # set up the request context
            self.app.preprocess_request()

            # create an in-process worker
            worker = SimpleWorker([g.queue], connection=g.queue.connection)

            # process jobs
            worker.work(burst=True)
Example #27
def start_worker():
    """Connect to the database and start listening works in redis"""

    # Check if database is empty for populating places
    if queries.is_database_empty():
        logger.info("Database is empty, populating places and first measures")
        refresh_data.populate_places()
    elif environment.REFRESH_AT_STARTUP:
        logger.info(
            "Existing database detected, retrieving measures for today")
        refresh_data.pull_measures()

    with Connection(conn):
        worker = SimpleWorker(map(Queue, listen))
        worker.work()
Example #28
    def setUp(self):
        redis_conn = FakeStrictRedis()
        # create queues
        main_queue = Queue("test_medium", connection=redis_conn,
                                failed_ttl=DELETE_FAILED_TIMEOUT,
                                default_timeout=DELETE_FINISHED_TIMEOUT)
        scheduled_queue = Queue("test_scheduled", connection=redis_conn,
                                    failed_ttl=DELETE_FAILED_TIMEOUT,
                                    default_timeout=DELETE_FINISHED_TIMEOUT)
        self.worker = SimpleWorker([main_queue, scheduled_queue],
                               connection=main_queue.connection)

        self.postman = Postman({"viber": Viber(), "whatsapp": WhatsApp(), "telegram": Telegram()})
        self.postman.redis_conn = redis_conn
        self.postman.scheduled_queue = scheduled_queue
        self.postman.main_queue = main_queue
Example #29
def _new_rq_worker() -> Worker:
    return SimpleWorker(
        queues=queue_names(),
        connection=redis_connection(),
        queue_class=Queue,  # the project's Queue class
        job_class=Job,  # the project's Job class
        exception_handlers=[RqExcMan.exc_handler])  # custom failure handling
Example #30
def test_func__reset_password__ok__nominal_case(self, user_api_factory, mailhog, app_config):
    uapi = user_api_factory.get()
    current_user = uapi.get_one_by_email("*****@*****.**")
    uapi.reset_password_notification(current_user, do_save=True)
    transaction.commit()
    # Send mail async from redis queue
    redis = get_redis_connection(app_config)
    queue = get_rq_queue(redis, "mail_sender")
    worker = SimpleWorker([queue], connection=queue.connection)
    worker.work(burst=True)
    # check mail received
    response = mailhog.get_mailhog_mails()
    headers = response[0]["Content"]["Headers"]
    assert headers["From"][0] == "Tracim Notifications <test_user_from+0@localhost>"
    assert headers["To"][0] == "Global manager <*****@*****.**>"
    assert headers["Subject"][0] == "[Tracim] A password reset has been requested"
Example #31
def test_freeze_objects_delete_jobs(session, redis, freeze_objects,
                                    museum_object_factory):
    """
    Freeze object with one pending and one failed job, and ensure
    they are both deleted
    """
    def successful_job():
        return ":)"

    def failing_job():
        raise RuntimeError(":(")

    museum_object_factory(id=123456)

    queue_a = get_queue(QueueType.DOWNLOAD_OBJECT)
    queue_b = get_queue(QueueType.SUBMIT_SIP)

    queue_a.enqueue(successful_job, job_id="download_object_123456")
    queue_b.enqueue(failing_job, job_id="submit_sip_123456")
    SimpleWorker([queue_b], connection=queue_b.connection).work(burst=True)

    freeze_objects(["--delete-jobs", "--reason", "Deleting job", "123456"])

    assert len(queue_a.job_ids) == 0
    assert len(queue_b.job_ids) == 0

    assert session.query(MuseumObject).filter_by(
        id=123456, freeze_reason="Deleting job").count() == 1
Example #32
def test_enqueue_background_job_failing_out_of_sync(tasks_col, mock_redis):
    """Test enqueuing a fake task using "enqueue_background_job"
    and ensure it is handled properly if the failure is recorded in RQ
    but not MongoDB.
    """
    job_id = enqueue_background_job(
        task_func="tests.unit_tests.jobs_test.failing_task",
        queue_name="upload",
        username="******",
        job_kwargs={})

    # Check that the Redis queue has the same job
    upload_queue = get_job_queue("upload")
    assert upload_queue.job_ids == [job_id]

    # Job can be finished
    SimpleWorker([upload_queue], connection=mock_redis).work(burst=True)

    rq_job = upload_queue.fetch_job(job_id)

    assert rq_job.is_failed

    # Update the status in MongoDB to appear in-progress, while in RQ
    # it has already failed
    tasks_col.update_message(job_id, "processing")
    tasks_col.update_status(job_id, "pending")

    # Retrieve the task from MongoDB; it should be automatically updated
    # to match the status in RQ
    task = tasks_col.get(job_id)

    assert task["message"] == "Internal server error"
    assert task["status"] == "error"
Example #33
def test_enqueue_background_job_successful(tasks_col, mock_redis):
    """Test enqueuing a fake task using "enqueue_background_job"
    and ensure it can be executed properly.
    """
    job_id = enqueue_background_job(
        task_func="tests.unit_tests.jobs_test.successful_task",
        queue_name="upload",
        username="******",
        job_kwargs={"value": "spam"})

    # Ensure the job is enqueued and MongoDB entry exists
    pending_jobs = list(tasks_col.find("test_project", "pending"))
    assert len(pending_jobs) == 1

    job = pending_jobs[0]
    assert str(job["_id"]) == job_id

    assert job["project"] == "test_project"
    assert job["message"] == "processing"
    assert job["status"] == "pending"

    # Check that the Redis queue has the same job
    upload_queue = get_job_queue("upload")
    assert upload_queue.job_ids == [job_id]

    # Job can be finished
    SimpleWorker([upload_queue], connection=mock_redis).work(burst=True)

    rq_job = upload_queue.fetch_job(job_id)

    assert rq_job.result == "Task ID = {}, value = spam".format(job_id)
Example #34
def test_reenqueue_object_success(reenqueue_object, session, redis,
                                  museum_object, museum_package):
    # Create fake DB entries
    museum_package.downloaded = True
    museum_package.packaged = True
    museum_package.uploaded = True
    museum_package.rejected = True
    session.commit()

    # Create a job that was completed prior to re-enqueuing
    queue = get_queue(QueueType.CONFIRM_SIP)

    queue.enqueue(successful_job, job_id="confirm_sip_123456")
    SimpleWorker([queue], connection=queue.connection).work(burst=True)

    finished_registry = FinishedJobRegistry(queue=queue)
    assert finished_registry.get_job_ids() == ["confirm_sip_123456"]

    result = reenqueue_object(["123456"])

    assert "Object 123456 re-enqueued" in result.stdout

    # New RQ task was enqueued
    queue = get_queue(QueueType.DOWNLOAD_OBJECT)
    assert "download_object_123456" in queue.job_ids

    # Database was updated
    db_museum_object = session.query(MuseumObject).filter_by(id=123456).one()

    assert len(db_museum_object.packages) == 1
    assert not db_museum_object.latest_package

    # Prior finished job was removed
    assert finished_registry.get_job_ids() == []
Example #35
    def test_func__create_new_content_with_notification__ok__nominal_case(
        self,
        user_api_factory,
        workspace_api_factory,
        content_api_factory,
        mailhog,
        app_config,
        content_type_list,
    ):
        uapi = user_api_factory.get(current_user=None)
        current_user = uapi.get_one_by_email("*****@*****.**")
        # Create new user with notification enabled on w1 workspace
        wapi = workspace_api_factory.get(current_user=current_user)
        workspace = wapi.get_one_by_label("Recipes")
        user = uapi.get_one_by_email("*****@*****.**")
        wapi.enable_notifications(user, workspace)

        api = content_api_factory.get(current_user=user)
        item = api.create(content_type_list.Folder.slug,
                          workspace,
                          None,
                          "parent",
                          do_save=True,
                          do_notify=False)
        api.create(content_type_list.File.slug,
                   workspace,
                   item,
                   "file1",
                   do_save=True,
                   do_notify=True)
        # Send mail async from redis queue
        redis = get_redis_connection(app_config)
        queue = get_rq_queue(redis, "mail_sender")
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)
        # check mail received
        response = mailhog.get_mailhog_mails()
        headers = response[0]["Content"]["Headers"]
        assert headers["From"][
            0] == '"Bob i. via Tracim" <test_user_from+3@localhost>'
        assert headers["To"][0] == "Global manager <*****@*****.**>"
        assert headers["Subject"][0] == "[Tracim] [Recipes] file1 (Opened)"
        assert headers["References"][0] == "<test_user_refs+22@localhost>"
        assert (
            headers["Reply-to"][0] ==
            '"Bob i. & all members of Recipes" <test_user_reply+22@localhost>')
Example #36
def get_simple_worker(*queue_names):
    """
    Returns an RQ worker for all queues or the specified ones.
    """
    queues = get_queues(*queue_names)
    return SimpleWorker(queues,
                        connection=queues[0].connection,
                        exception_handlers=get_exception_handlers() or None)
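Hypothetical usage of the helper above inside a test, assuming get_queues('default') resolves that name to an rq Queue as it does in the project this helper comes from:

# drain everything the code under test enqueued on the 'default' queue
worker = get_simple_worker('default')
worker.work(burst=True)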
Example #37
    def test_api__add_upload_permission__ok_200__with_email_notification_async(
        self,
        workspace_api_factory,
        content_api_factory,
        session,
        web_testapp,
        content_type_list,
        upload_permission_lib_factory,
        admin_user,
        mailhog,
        app_config,
    ) -> None:
        workspace_api = workspace_api_factory.get()
        workspace = workspace_api.create_workspace("test workspace", save_now=True)
        upload_permission_lib = upload_permission_lib_factory.get()  # type: UploadPermissionLib
        upload_permission_lib.add_permission_to_workspace(
            workspace, emails=["*****@*****.**", "toto <[email protected]>"], do_notify=True
        )
        transaction.commit()

        mailhog.cleanup_mailhog()
        # Send mail async from redis queue
        redis = get_redis_connection(app_config)
        queue = get_rq_queue(redis, "mail_sender")
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)

        response = mailhog.get_mailhog_mails()
        assert len(response) == 3
        valid_dests = [
            "Global manager <*****@*****.**>",
            "*****@*****.**",
            "toto <[email protected]>",
        ]
        for email in response:
            assert email["Content"]["Headers"]["From"][0] in (
                "Tracim Notifications <test_user_from+0@localhost>",
                "Global manager via Tracim <test_user_from+1@localhost>",
            )
            headers = email["Content"]["Headers"]
            assert headers["To"][0] in valid_dests
            valid_dests.remove(headers["To"][0])
        assert valid_dests == []
Example #38
    def test_microservice(self):
        """Test the microservice."""
        redis_conn.flushall()
        req_1 = self.app.post('/put', data={'int': 1})
        req_1_as_json = json.loads(req_1.data)
        assert req_1_as_json['integer_received'] == 1
        req_2 = self.app.post('/put', data={'int': 1})
        del req_2
        time.sleep(1)  # Staggering requests
        req_3 = self.app.post('/put', data={'int': 4})
        del req_3
        req_4 = self.app.post('/put', data={'int': 6})
        del req_4
        time.sleep(1)  # Staggering requests
        req_5 = self.app.post('/put', data={'int': 8})
        del req_5
        req_6 = self.app.post('/put', data={'int': 10})
        del req_6
        median_req = self.app.get('/median')
        median_as_dict = json.loads(median_req.data)
        median_not_finished_job_req = self.app.get(
            '/tasks/{}'.format(median_as_dict['task_id'])
        )
        assert median_not_finished_job_req.status_code == 202
        not_finished_as_dict = json.loads(median_not_finished_job_req.data)
        assert not_finished_as_dict['message'] == (
            "Still processing..."
        )
        time.sleep(1)
        with Connection(redis_conn):
            queue = Queue(connection=redis_conn)
            worker = SimpleWorker([queue])
            worker.work(burst=True)

        median_job_req = self.app.get(
            '/tasks/{}'.format(median_as_dict['task_id'])
        )
        results_as_dict = json.loads(median_job_req.data)
        assert results_as_dict['task_results']['result'] == 5
Example #39
    def test_func__create_user_with_mail_notification__ok__nominal_case(self):
        api = UserApi(
            current_user=None,
            session=self.session,
            config=self.app_config,
        )
        u = api.create_user(
            email='bob@bob',
            password='******',
            name='bob',
            timezone='+2',
            do_save=True,
            do_notify=True,
        )
        assert u is not None
        assert u.email == "bob@bob"
        assert u.validate_password('password')
        assert u.display_name == 'bob'
        assert u.timezone == '+2'

        # Send mail async from redis queue
        redis = get_redis_connection(
            self.app_config
        )
        queue = get_rq_queue(
            redis,
            'mail_sender',
        )
        worker = SimpleWorker([queue], connection=queue.connection)
        worker.work(burst=True)
        # check mail received
        response = self.get_mailhog_mails()
        headers = response[0]['Content']['Headers']
        assert headers['From'][0] == 'Tracim Notifications <test_user_from+0@localhost>'  # nopep8
        assert headers['To'][0] == 'bob <bob@bob>'
        assert headers['Subject'][0] == '[TRACIM] Created account'
Example #40
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import redis
from rq import Connection
from rq import Queue
from rq import SimpleWorker

from codecoverage_backend import secrets

conn = redis.from_url(secrets.REDIS_URL)


def exc_handler(job, *exc_info):
    job.cleanup(ttl=3600)


if __name__ == '__main__':
    with Connection(conn):
        worker = SimpleWorker(map(Queue, ['default']), exception_handlers=[])
        worker.push_exc_handler(exc_handler)
        worker.push_exc_handler(worker.move_to_failed_queue)
        worker.work()
Example #41
def execute_all_jobs(worker_queue, redis_con):
    worker = SimpleWorker([worker_queue], connection=redis_con)
    worker.work(burst=True)
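Hypothetical call site for this helper, paired with fakeredis so the whole run stays in-process:

from fakeredis import FakeStrictRedis
from rq import Queue

redis_con = FakeStrictRedis()
worker_queue = Queue('default', connection=redis_con)
# ... enqueue jobs under test on worker_queue ...
execute_all_jobs(worker_queue, redis_con)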