Ejemplo n.º 1
0
    def test_missing_genome_section(self):
        """Batch 2's MANIFEST has no genome version, so only Batch 1's
        samples should be loaded and published."""
        expected_counts = {
            'batches': 1,
            'results_per_sample': [
                {
                    'batch': 'batch1',
                    'sample': 'NA12891',
                    'count': 1963,
                },
                {
                    'batch': 'batch1',
                    'sample': 'NA12892',
                    'count': 1963,
                },
                {
                    'batch': 'batch1',
                    'sample': 'NA12878',
                    'count': 1963,
                }
            ],
            'samples': 3,
            'samples_per_batch': [(1, 3)],
        }
        expected_counts['results'] = \
            sum([x['count'] for x in expected_counts['results_per_sample']])

        # Immediately validates and creates a sample.
        management.call_command('samples', 'queue')

        # Synchronously work on queue.
        worker1 = get_worker('variants')
        worker2 = get_worker('default')

        # Work on variants.
        worker1.work(burst=True)

        # Work on effects.
        worker2.work(burst=True)

        # Since the MANIFEST for Batch 2 has no genome version listed, we
        # should only have data for samples in Batch 1. Perform all the checks
        # against our trimmed list of expected counts.
        self.assertEqual(Result.objects.count(), expected_counts['results'])

        # Only the surviving batch (Batch 1) should be published.
        self.assertEqual(Batch.objects.filter(published=True).count(),
                         expected_counts['batches'])

        # Ensure the counts are accurate for each sample.
        for ec in expected_counts['results_per_sample']:
            sample = Sample.objects.get(name=ec['sample'],
                                        batch__name=ec['batch'])
            self.assertTrue(sample.published)
            self.assertEqual(sample.count, ec['count'])

        # NOTE(review): the original comment here claimed batches are
        # "unpublished", but the assertion below expects published batches.
        for pk, count in expected_counts['samples_per_batch']:
            batch = Batch.objects.get(pk=pk)
            self.assertTrue(batch.published)
            self.assertEqual(batch.count, count)
Ejemplo n.º 2
0
    def test_invalid_url(self):
        """
        Creating jobs with URLs that are not Publico news articles should
        still yield a job whose results endpoint answers with HTTP 200.
        """
        payload = {
            "urls": [
                "https://www.google.pt/",
                # Notice that the following link is not a news
                "https://www.cmjornal.pt/",
            ]
        }
        response = self.api.post(reverse("cm_url_search"), payload,
                                 format="json")

        # Both the job id and the results endpoint must be returned.
        self.assertIn("job_id", response.data)
        self.assertIn("results_url", response.data)

        # Run every queued job synchronously.
        get_worker().work(burst=True)

        # Fetch the results and expect a successful response.
        results_response = self.api.get(response.data["results_url"])
        self.assertEqual(results_response.status_code, status.HTTP_200_OK)
Ejemplo n.º 3
0
    def setUp(self):
        super(DeleteTestCase, self).setUp()

        # Queue up the samples; validation happens immediately.
        management.call_command('samples', 'queue')

        # Drain both queues synchronously: variants first, then effects.
        get_worker('variants').work(burst=True)
        get_worker('default').work(burst=True)

        # Seed the reference data used by the knowledge capture
        # assessments created later in the tests.
        self.pathogenicity = Pathogenicity(name='pathogenic')
        self.pathogenicity.save()
        self.parental_result = ParentalResult(name='heterozygous')
        self.parental_result.save()
        self.category = AssessmentCategory(name='other')
        self.category.save()
        self.user = User.objects.all()[0]
Ejemplo n.º 4
0
 def test_anonymous_post(self):
     """An unauthenticated POST to the franchise create endpoint must be
     rejected with HTTP 401, even after background jobs are processed."""
     url = reverse('workery_franchise_list_create_api_endpoint')
     # NOTE: the original literal declared "postal_code" twice ("n6j4x4"
     # then "N6J4X4"); only the last value survives in a dict, so the
     # shadowed duplicate was removed.
     post_data = json.dumps({
         "schema_name": "mikasoftware",
         "name": "Mika Software Corporation",
         "alternate_name": "Mika Software",
         "description": "An open source software company.",
         "url": "https://mikasoftware.com",
         "timezone_name": "America/Toronto",
         "address_country": "Canada",
         "address_locality": "London",
         "address_region": "Ontario",
         "postal_code": "N6J4X4",
         "street_address": "120 Centre Street",
         "street_address_extra": "Unit 102"
     })
     response = self.anon_client.post(url,
                                      data=post_data,
                                      content_type='application/json')
     # Processes all BACKGROUND jobs in FOREGROUND then stop.
     # (Note: https://stackoverflow.com/a/12273705)
     get_worker().work(burst=True)
     self.assertIsNotNone(response)
     self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
Ejemplo n.º 5
0
    def test(self):
        "Load a single VCF, reload the snpEff data using the same VCF."
        management.call_command('samples', 'queue',
                                os.path.join(SAMPLE_DIRS[0], 'batch1/locus_1'))

        # Synchronously work on queue
        worker1 = get_worker('variants')
        worker2 = get_worker('default')
        worker1.work(burst=True)
        worker2.work(burst=True)

        expected_variant_effects_count = 5426

        self.assertEqual(VariantEffect.objects.count(),
                         expected_variant_effects_count)
        self.assertEqual(
            VariantEffect.objects.aggregate(max_id=Max('id'))['max_id'],
            expected_variant_effects_count)

        management.call_command('variants', 'reload-snpeff',
                                os.path.join(SAMPLE_DIRS[0],
                                             'batch1/locus_1/locus_1.vcf'))

        # The row count is unchanged after the reload; the actual proof of
        # the reload is the auto-incremented key checked just below.
        self.assertEqual(VariantEffect.objects.count(),
                         expected_variant_effects_count)

        # Since we reloaded, we should now have double the number of expected
        # results, thus the 2 * operation in the assertion below.
        self.assertEqual(
            VariantEffect.objects.aggregate(max_id=Max('id'))['max_id'],
            2 * expected_variant_effects_count)
Ejemplo n.º 6
0
    def test_sending_mail_after_creating_team(self):
        """Creating a team should email every member plus the leader."""
        self.client.force_authenticate(user=self.user1)
        payload = create_valid_team_data()

        self.client.post(self.url_team_list, payload, format='json')
        # Flush the queued mail-sending jobs synchronously.
        django_rq.get_worker().work(burst=True)

        expected_mails = len(payload['members']) + 1
        self.assertEqual(len(mail.outbox), expected_mails)
Ejemplo n.º 7
0
    def test_auto_albums(self):
        """A user can generate auto albums, list them, retrieve each one,
        and regeneration must not create duplicates."""
        client = self.client_users[0]

        # Trigger auto album generation and run the queued job.
        gen_res = client.get('/api/autoalbumgen/')
        self.assertEqual(gen_res.status_code, 200)
        get_worker().work(burst=True)

        # The generated albums must be listable.
        list_res = client.get('/api/albums/auto/list/')
        self.assertEqual(list_res.status_code, 200)

        # Every album must be retrievable and contain photos.
        for album in list_res.json()['results']:
            detail_res = client.get('/api/albums/auto/%d/' % album['id'])
            self.assertEqual(detail_res.status_code, 200)
            self.assertTrue(len(detail_res.json()['photos']) > 0)

        # Regenerating must not create duplicate albums.
        album_count = len(list_res.json()['results'])
        gen_res = client.get('/api/autoalbumgen/')
        self.assertEqual(gen_res.status_code, 200)
        get_worker().work(burst=True)

        list_res = client.get('/api/albums/auto/list/')
        self.assertEqual(len(list_res.json()['results']), album_count)
Ejemplo n.º 8
0
 def test_run_once_job(self):
     """A single-run job with arguments must be deleted once executed."""
     job = Job.objects.get(
         task='django_rq_jobs.tests.tasks.django_arg_check')
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     # After its only run the job record must be gone.
     self.assertFalse(Job.objects.filter(pk=job.pk).exists())
Ejemplo n.º 9
0
    def test_cant_retrieve_pending_connection_user_user_encircled_post_media_video(
            self):
        """
        should NOT be able to retrieve the media video of an encircled post
        made by a user whose connection is still pending (expects HTTP 400)
        """
        user = make_user()
        pending_connection_user_user = make_user()

        # A pending connection only: the other user never confirms it.
        pending_connection_user_user.connect_with_user_with_id(user_id=user.pk)

        headers = make_authentication_headers_for_user(user=user)

        test_video = get_test_video()

        with open(test_video['path'], 'rb') as file:
            file = File(file)
            circle = make_circle(creator=pending_connection_user_user)
            post = pending_connection_user_user.create_encircled_post(
                video=file, circles_ids=[circle.pk])

        # Process the queued video work synchronously.
        get_worker('high', worker_class=SimpleWorker).work(burst=True)

        url = self._get_url(post=post)

        response = self.client.get(url, **headers, format='multipart')

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ejemplo n.º 10
0
    def test_can_retrieve_follower_user_post_media_video(self):
        """
        should be able to retrieve an follower_user post media video
        """
        user = make_user()
        follower_user = make_user()
        follower_user.follow_user(user=user)

        headers = make_authentication_headers_for_user(user=user)

        video_info = get_test_video()
        with open(video_info['path'], 'rb') as video_fd:
            post = follower_user.create_public_post(video=File(video_fd))

        # Process the queued video work synchronously.
        get_worker('high', worker_class=SimpleWorker).work(burst=True)

        response = self.client.get(self._get_url(post=post), **headers,
                                   format='multipart')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response_media = json.loads(response.content)
        post.refresh_from_db()
        post_media = post.get_media().all()

        self._compare_response_media_with_post_media(
            post_media=post_media, response_media=response_media)
Ejemplo n.º 11
0
    def test_cannot_retrieve_private_community_not_part_of_post_media_video(
            self):
        """
        should not be able to retrieve an private_community not part of post media video
        """
        user = make_user()
        community_creator = make_user()
        private_community = make_community(
            creator=community_creator, type=Community.COMMUNITY_TYPE_PRIVATE)

        headers = make_authentication_headers_for_user(user=user)

        video_info = get_test_video()
        with open(video_info['path'], 'rb') as video_fd:
            post = community_creator.create_community_post(
                video=File(video_fd), community_name=private_community.name)

        # Process the queued video work synchronously.
        get_worker('high', worker_class=SimpleWorker).work(burst=True)

        response = self.client.get(self._get_url(post=post), **headers,
                                   format='multipart')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ejemplo n.º 12
0
    def test_sending_mail_after_deleting_team(self):
        """Deleting a team should notify every member plus the leader."""
        self.client.force_authenticate(user=self.user1)

        mails_before = len(mail.outbox)
        self.client.delete(self.url_team_detail)
        # Run the queued notification jobs synchronously.
        django_rq.get_worker().work(burst=True)
        self.assertEqual(len(mail.outbox),
                         mails_before + len(self.data['members']) + 1)
Ejemplo n.º 13
0
        def test_makemessages_django_tq_more_jobs(self):
            """Three queued makemessages tasks must all run without error."""
            queue = get_queue('default')
            for _ in range(3):
                queue.enqueue(tasks.makemessages_task)

            # Drain the queue synchronously.
            get_worker().work(burst=True)
Ejemplo n.º 14
0
    def test_correct_job_response(self):
        """
        The job-results payload must contain the expected fields and a
        near-current timestamp.
        """
        response = self.api.post(
            reverse("publico_url_search"),
            {
                "urls": [
                    "https://www.publico.pt/2021/01/31/economia/noticia/irs-contribuintes-podem-validar-agregado-familiar-ate-15-fevereiro-1948701"
                ],
            },
        )

        # The job id and the results endpoint must both be present.
        self.assertIn("job_id", response.data)
        self.assertIn("results_url", response.data)

        # Execute the queued job synchronously, then fetch its results.
        get_worker().work(burst=True)
        results = self.api.get(response.data["results_url"])

        self.assertIn("number_of_news", results.data)
        self.assertIn("date", results.data)
        # The reported date must be within one second of "now".
        reported = datetime_from_string(results.data["date"], order="YMD")
        self.assertTrue(abs(now() - reported) < datetime.timedelta(seconds=1))
        self.assertIn("news", results.data)
Ejemplo n.º 15
0
def rq_worker(request):
    """Fixture: yield an RQ worker with every queue emptied before and
    after the test.

    ``request`` is the standard fixture request object; it is unused but
    kept for signature compatibility.
    """
    # Use plain loops rather than throwaway list comprehensions, since
    # only the side effect (emptying the queues) matters.
    for queue in django_rq.get_worker().queues:
        queue.empty()

    worker = django_rq.get_worker()

    yield worker

    # Clean up any jobs the test left behind.
    for queue in django_rq.get_worker().queues:
        queue.empty()
Ejemplo n.º 16
0
def rq_worker(request):
    """Fixture: yield an RQ worker with every queue emptied before and
    after the test.

    ``request`` is the standard fixture request object; it is unused but
    kept for signature compatibility.
    """
    # Use plain loops rather than throwaway list comprehensions, since
    # only the side effect (emptying the queues) matters.
    for queue in django_rq.get_worker().queues:
        queue.empty()

    worker = django_rq.get_worker()

    yield worker

    # Clean up any jobs the test left behind.
    for queue in django_rq.get_worker().queues:
        queue.empty()
Ejemplo n.º 17
0
    def test_sending_mail_after_updating_member_data(self):
        """Updating a member should notify all members plus the leader."""
        team_pk = Team.objects.get(name=self.data['name']).pk
        member_pk = Member.objects.get(
            name=self.data['members'][0]['name']).pk
        url = reverse('api:member-detail',
                      kwargs={'pk': team_pk, 'member_pk': member_pk})
        self.client.force_authenticate(user=self.user1)

        mails_before = len(mail.outbox)
        payload = create_valid_member_data()
        self.client.put(url, payload, format='json')
        # Run the queued notification jobs synchronously.
        django_rq.get_worker().work(burst=True)
        self.assertEqual(len(mail.outbox),
                         mails_before + len(self.data['members']) + 1)
Ejemplo n.º 18
0
    def test_sending_mail_after_updating_team_data(self):
        """Updating team data should re-notify every member and the leader."""
        self.client.force_authenticate(user=self.user1)

        mails_before = len(mail.outbox)
        # Sanity check: creation already sent one mail per member plus one.
        self.assertEqual(mails_before, len(self.data['members']) + 1)

        self.client.put(self.url_team_detail, self.data, format='json')
        # Run the queued notification jobs synchronously.
        django_rq.get_worker().work(burst=True)
        self.assertEqual(len(mail.outbox),
                         mails_before + len(self.data['members']) + 1)
Ejemplo n.º 19
0
def test_rq(settings, image, instance_no_image):
    """With the django_rq caller configured, saving an image should produce
    a thumbnail once the queued transform job runs."""
    settings.SIMPLEIMAGES_TRANSFORM_CALLER = 'django_rq.enqueue'

    instance_no_image.image.save(image.name, image.django_file)

    # Run the queued transform synchronously.
    django_rq.get_worker().work(burst=True)

    refreshed = instance_no_image.retrieve_from_database()
    assert refreshed.thumbnail
Ejemplo n.º 20
0
 def test_run_limited_job(self):
     """A job limited to two runs should count down, then be deleted."""
     job = Job.objects.create(
         task='django_rq_jobs.tests.tasks.django_check',
         schedule_type=Job.HOURLY,
         repeats=2,
         next_run=timezone.now() + timedelta(hours=-2))
     # First run: the repeat counter drops to one.
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     self.assertEqual(Job.objects.get(pk=job.pk).repeats, 1)
     # Second run: the job record is removed.
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     self.assertFalse(Job.objects.filter(pk=job.pk).exists())
Ejemplo n.º 21
0
    def test_sending_mail_after_deleting_member(self):
        """Deleting a member notifies everyone, unless the team would drop
        below the minimum size, in which case the request is rejected."""
        team_pk = Team.objects.get(name=self.data['name']).pk
        member_pk = Member.objects.get(
            name=self.data['members'][0]['name']).pk
        url = reverse('api:member-detail',
                      kwargs={'pk': team_pk, 'member_pk': member_pk})
        self.client.force_authenticate(user=self.user1)

        mails_before = len(mail.outbox)
        response = self.client.delete(url)
        minimum = settings.NUMBER_OF_MEMBERS[self.data['event']][0]
        if len(self.data['members']) > minimum - 1:
            django_rq.get_worker().work(burst=True)
            self.assertEqual(len(mail.outbox),
                             mails_before + len(self.data['members']) + 1)
        else:
            self.assertEqual(response.status_code,
                             status.HTTP_400_BAD_REQUEST)
Ejemplo n.º 22
0
    def test_rebuild_indices_using_subtasks(self):
        """Rebuilding indices via subtasks should index every visible doc."""
        result = rebuild_indices(self.SEARCH, subtask_indexing=True)
        # Run the queued indexing subtasks synchronously.
        get_worker().work(burst=True)

        self.assertIsNotNone(result)
        self.assertIsInstance(result, dict)

        self.index.refresh()
        expected = self.published_entry_count + self.published_project_count
        if not self.PUBLISH_FILTER_ENABLED:
            expected += (self.unpublished_entry_count +
                         self.unpublished_project_count)
        self.assertEqual(expected, self.search.count())
Ejemplo n.º 23
0
    def test_rebuild_indices_using_subtasks(self):
        """Rebuilding indices via subtasks should index every visible doc."""
        result = rebuild_indices(self.SEARCH, subtask_indexing=True)
        # Run the queued indexing subtasks synchronously.
        get_worker().work(burst=True)

        self.assertIsNotNone(result)
        self.assertIsInstance(result, dict)

        self.index.refresh()
        expected = self.published_entry_count + self.published_project_count
        if not self.PUBLISH_FILTER_ENABLED:
            expected += (self.unpublished_entry_count +
                         self.unpublished_project_count)
        self.assertEqual(expected, self.search.count())
Ejemplo n.º 24
0
 def test_run_limited_job(self):
     """A job limited to two runs should count down, then be deleted."""
     job = Job.objects.create(task='django_rq_jobs.tests.tasks.django_check',
                              schedule_type=Job.HOURLY, repeats=2,
                              next_run=timezone.now() + timedelta(hours=-2))
     # First run: the repeat counter drops to one.
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     self.assertEqual(Job.objects.get(pk=job.pk).repeats, 1)
     # Second run: the job record is removed.
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     self.assertFalse(Job.objects.filter(pk=job.pk).exists())
    def begin_processing(self, schema_name, name, alternate_name, description,
                         country, city, province, street_number, street_name,
                         apartment_unit, street_type, street_type_other,
                         street_direction, postal_code, timezone_name,
                         police_report_url, default_position, default_zoom):
        """
        Create a new tenant from the given parameters, attach its domain
        and populate it with post-creation content.
        """

        # Create your tenant
        tenant = SharedOrganization(
            schema_name=schema_name,
            name=name,
            alternate_name=alternate_name,
            description=description,
            country=country,
            city=city,
            province=province,
            street_number=street_number,
            street_name=street_name,
            apartment_unit=apartment_unit,
            street_type=street_type,
            street_type_other=street_type_other,
            street_direction=street_direction,
            postal_code=postal_code,
            timezone_name=timezone_name,
            police_report_url=police_report_url,
            default_position=default_position,
            default_zoom=default_zoom,
        )
        tenant.save()

        # Processes all BACKGROUND jobs in FOREGROUND then stop.
        # (Note: https://stackoverflow.com/a/12273705)
        get_worker().work(burst=True)

        # Add one or more domains for the tenant. The original code first
        # assigned the bare backend domain and immediately overwrote it;
        # only the schema-qualified value is kept.
        domain = SharedOrganizationDomain()
        domain.domain = tenant.schema_name + '.' + settings.NWAPP_BACKEND_HTTP_DOMAIN
        domain.tenant = tenant
        domain.is_primary = False
        domain.save()

        # Drain any background jobs triggered by the domain save.
        get_worker().work(burst=True)

        # Populate our new organization tenant with post-creation data.
        call_command('populate_tenant_content', schema_name, verbosity=0)
Ejemplo n.º 26
0
    def test_sending_mail_after_post(self):
        """Adding a member emails everyone, unless the team is already at
        its maximum size, in which case the request is rejected."""
        self.client.force_authenticate(user=self.user1)

        payload = create_valid_member_data()
        mails_before = len(mail.outbox)

        response = self.client.post(self.url_member_list, payload,
                                    format='json')

        maximum = settings.NUMBER_OF_MEMBERS[self.data['event']][1]
        if len(self.data['members']) < maximum - 1:
            django_rq.get_worker().work(burst=True)
            self.assertEqual(len(mail.outbox),
                             mails_before + len(self.data['members']) + 2)
        else:
            self.assertEqual(response.status_code,
                             status.HTTP_400_BAD_REQUEST)
Ejemplo n.º 27
0
 def test_create_media(self):
     """Saving a Media object through the admin must assign it an id."""
     # MediaAdmin must be imported here rather than at module level,
     # otherwise it is loaded before settings and fails.
     from .admin import MediaAdmin
     media_admin = MediaAdmin(Media, self.site)
     request = self.factory.get('/admin/archives/media/add')
     request.user = self.user
     obj = MediaFactory.build()
     # Unsaved objects have no id yet.
     self.assertEqual(obj.id, None)
     media_admin.save_model(request, obj, None, False)
     # After save_model the object has been persisted and got an id.
     self.assertTrue(isinstance(obj.id, int))
     # Drain any background jobs the save triggered.
     get_worker().work(burst=True)
Ejemplo n.º 28
0
 def test_run_job(self):
     """Run a scheduled job and verify it is rescheduled properly."""
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     test_job = Job.objects.get(task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.HOURLY)
     self.assertNotEqual(test_job, None)
     # The job must be linked to an RQ job after running. (The original
     # asserted rq_origin twice; the duplicate was removed.)
     self.assertNotEqual(test_job.rq_id, None)
     self.assertNotEqual(test_job.rq_origin, None)
     self.assertIsNot(test_job.rq_job, None)
     self.assertNotEqual(test_job.rq_status(), None)
     self.assertNotEqual(test_job.rq_link(), None)
     # It must record its last run and be rescheduled into the future.
     self.assertNotEqual(test_job.last_run, None)
     self.assertTrue(test_job.next_run > timezone.now())
Ejemplo n.º 29
0
    def setUp(self):
        super(AlleleTestCase, self).setUp()

        # Queue the samples; validation happens immediately.
        management.call_command("samples", "queue")

        # Drain both queues synchronously: variants first, then effects.
        get_worker("variants").work(burst=True)
        get_worker("default").work(burst=True)
Ejemplo n.º 30
0
 def test_get_worker_default(self):
     """
     Without arguments, ``get_worker`` must serve the ``default`` queue.
     """
     first_queue = get_worker().queues[0]
     self.assertEqual(first_queue.name, 'default')
Ejemplo n.º 31
0
    def test_work(self, mocker):
        """Working the queue must close stale database connections."""
        close_database = mocker.patch(
            "metadeploy.rq_worker.ConnectionClosingWorker.close_database")
        get_worker().work(burst=True)

        assert close_database.called
Ejemplo n.º 32
0
def worker():
    """Return an RQ worker whose default queue has been emptied."""
    django_rq.get_queue().empty()
    return django_rq.get_worker()
Ejemplo n.º 33
0
 def test_request_force_stop__dead_horse(self, mocker):
     """If killing the horse process fails, the OSError must propagate."""
     # Patch os.kill to raise; the returned mock and a MagicMock job
     # were unused in the original and have been removed.
     mocker.patch("os.kill", side_effect=OSError)
     worker = get_worker()
     worker._horse_pid = 1
     with pytest.raises(OSError):
         worker.request_force_stop(signal.SIGTERM, None)
Ejemplo n.º 34
0
    def test_media_encoded_state(self):
        """Encode a media file through the RQ queue and verify its state."""
        # Create the media without encoding it.
        self.media = MediaFactory.build(title='test_media_encoded_state', file__from_path=os.path.join(os.path.dirname(__file__), 'tests/data/audio-mini.mp3'))
        self.media.save(encode=False)

        # Check it has been created and that its state is "not encoded".
        self.assertEqual(self.media.id, 1)
        self.assertEqual(self.media.encoding_state, ENCODING_NOT_ENCODED)

        # Put it in the encoding queue.
        queue = django_rq.get_queue('default')
        job = queue.enqueue(call_command, args=('encode', self.media.id))

        # The job must be waiting in the queue.
        self.assertTrue(job.is_queued)

        # Start encoding synchronously.
        worker = get_worker('default')
        worker.work(burst=True)

        # The state should now be "encoded" and the job finished.
        # NOTE(review): self.media is not refreshed from the database before
        # this assertion — presumably the state is visible on this instance;
        # confirm, otherwise a refresh_from_db() is needed here.
        self.assertEqual(self.media.encoding_state, ENCODING_ENCODED)
        self.assertFalse(job.is_queued)
        self.assertTrue(job.is_finished)
        failed_queue = Queue(name='failed', connection=queue.connection)
        self.assertFalse(job.id in failed_queue.job_ids)
Ejemplo n.º 35
0
 def test_run_job(self):
     """Run a scheduled job and verify it is rescheduled properly."""
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     test_job = Job.objects.get(
         task='django_rq_jobs.tests.tasks.django_check',
         schedule_type=Job.HOURLY)
     self.assertNotEqual(test_job, None)
     # The job must be linked to an RQ job after running. (The original
     # asserted rq_origin twice; the duplicate was removed.)
     self.assertNotEqual(test_job.rq_id, None)
     self.assertNotEqual(test_job.rq_origin, None)
     self.assertIsNot(test_job.rq_job, None)
     self.assertNotEqual(test_job.rq_status(), None)
     self.assertNotEqual(test_job.rq_link(), None)
     # It must record its last run and be rescheduled into the future.
     self.assertNotEqual(test_job.last_run, None)
     self.assertTrue(test_job.next_run > timezone.now())
Ejemplo n.º 36
0
    def test_request_stop__current_job(self, mocker):
        """With a job in flight, request_stop flags the worker stopped."""
        signal = mocker.patch("signal.signal")
        worker = get_worker()
        worker.get_current_job = mocker.MagicMock()
        worker.request_stop(signal.SIGINT, None)

        # The worker is flagged to stop once the current job finishes.
        assert worker._stopped
Ejemplo n.º 37
0
    def test_work(self, mocker):
        """Working the queue must close stale database connections."""
        close_database = mocker.patch(
            "{{cookiecutter.project_slug}}.rq_worker.ConnectionClosingWorker.close_database"
        )
        get_worker().work(burst=True)

        assert close_database.called
Ejemplo n.º 38
0
    def test_request_stop(self, mocker):
        """With no job in flight, request_stop raises StopRequested."""
        signal = mocker.patch("signal.signal")
        worker = get_worker()
        with pytest.raises(StopRequested):
            worker.request_stop(signal.SIGINT, None)

        # Both signal handlers were (re)installed.
        assert signal.call_count == 2
Ejemplo n.º 39
0
    def test_create_new_app_user(self):
        """Logging in a new Facebook user creates the AppUser and friends."""
        payload = {
            'app_facebook_id': self.mobile_app.facebook_id,
            'oauth_token': user_oauth_token,
            'facebook_id': user_facebook_id
        }
        response = self.client.post('/appUserLogin/', payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Process all queued jobs, then stop.
        get_worker().work(burst=True)

        # The AppUser must now exist...
        user = AppUser.objects.get(facebook_id=user_facebook_id)
        self.assertEqual(isinstance(user, AppUser), True)

        # ...along with his four friends.
        self.assertEqual(user.friends.all().count(), 4)
Ejemplo n.º 40
0
    def test_close_database__good(self, mocker):
        """close_database should close every connection Django reports."""
        connection = MagicMock()
        mocker.patch("django.db.connections.all").return_value = [connection]

        worker = get_worker()
        worker.close_database()

        assert connection.close.called
Ejemplo n.º 41
0
    def test_close_database__database_error__reraise(self, mocker):
        """Unrecognized DatabaseErrors from close() must propagate."""
        connection = MagicMock()
        connection.close.side_effect = DatabaseError("reraise me")
        mocker.patch("django.db.connections.all").return_value = [connection]

        worker = get_worker()
        with pytest.raises(DatabaseError):
            worker.close_database()
Ejemplo n.º 42
0
    def test_create_new_app_user(self):
        """Logging in a new Facebook user creates the AppUser and friends."""
        payload = {
            'app_facebook_id': self.mobile_app.facebook_id,
            'oauth_token': user_oauth_token,
            'facebook_id': user_facebook_id
        }
        response = self.client.post('/appUserLogin/', payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Process all queued jobs, then stop.
        get_worker().work(burst=True)

        # The AppUser must now exist...
        user = AppUser.objects.get(facebook_id=user_facebook_id)
        self.assertEqual(isinstance(user, AppUser), True)

        # ...along with his four friends.
        self.assertEqual(user.friends.all().count(), 4)
Ejemplo n.º 43
0
    def test_perform_job(self, mocker):
        """perform_job must close database connections around the work."""
        close_database = mocker.patch(
            "statusite.worker.RequeueingWorker.close_database")
        mocker.patch("rq.worker.Worker.perform_job")

        # Symbolic call only: the superclass implementation is mocked out.
        worker = get_worker()
        worker.perform_job(None, None)

        assert close_database.called
Ejemplo n.º 44
0
    def test_perform_job(self, mocker):
        """perform_job must close database connections around the work."""
        close_database = mocker.patch(
            "metadeploy.rq_worker.ConnectionClosingWorker.close_database")
        mocker.patch("rq.worker.Worker.perform_job")

        # Symbolic call only: the superclass implementation is mocked out.
        worker = get_worker()
        worker.perform_job(None, None)

        assert close_database.called
Ejemplo n.º 45
0
    def test_close_database__database_error__no_reraise(self, mocker):
        """'closed/not connected' DatabaseErrors are swallowed, not raised."""
        connection = MagicMock()
        connection.close.side_effect = DatabaseError("closed not connected don't reraise me")
        mocker.patch("django.db.connections.all").return_value = [connection]

        worker = get_worker()
        worker.close_database()

        assert connection.close.called
Ejemplo n.º 46
0
    def test_close_database__interface_error(self, mocker):
        """InterfaceErrors raised while closing connections are tolerated."""
        connection = MagicMock()
        connection.close.side_effect = InterfaceError()
        mocker.patch("django.db.connections.all").return_value = [connection]

        worker = get_worker()
        worker.close_database()

        assert connection.close.called
Ejemplo n.º 47
0
    def startWorkers(self):
        """Ensure RQ workers exist for the 'default' and 'variants' queues,
        starting a burst worker for any queue that has none."""
        queues = getattr(settings, 'RQ_QUEUES', {})
        # dict.get replaces the original conditional-expression lookups.
        default = queues.get('default')
        variants = queues.get('variants')

        if not (queues and default and variants):
            log.warning('RQ_QUEUES settings could not be found')
            return

        # Create connections to redis to identify the workers
        def_connection = redis.Redis(host=default['HOST'],
                                     port=default['PORT'],
                                     db=default['DB'])
        var_connection = redis.Redis(host=variants['HOST'],
                                     port=variants['PORT'],
                                     db=variants['DB'])

        # Get all the workers connected with our redis server
        try:
            all_workers = Worker.all(def_connection) + \
                Worker.all(var_connection)
        except ConnectionError:
            log.warning('Could not connect to redis server to create workers. '
                        'Please make sure Redis server is running')
            return

        # Check every worker (duplicates included) for queue coverage.
        found_default = any('default' in w.queue_names() for w in all_workers)
        found_variant = any('variants' in w.queue_names() for w in all_workers)

        # Start the required worker
        if not found_variant:
            log.debug('Did not find variants worker. Starting ... ')
            get_worker('variants').work(burst=True)

        if not found_default:
            log.debug('Did not find default worker. Starting ... ')
            get_worker('default').work(burst=True)
Ejemplo n.º 48
0
    def test_request_force_stop(self, mocker):
        """Force-stop must requeue the current job, kill the horse and exit."""
        kill = mocker.patch("os.kill")
        worker = get_worker()
        current_job = mocker.MagicMock()
        worker.get_current_job = mocker.MagicMock(return_value=current_job)
        worker._horse_pid = 1
        with pytest.raises(SystemExit):
            worker.request_force_stop(signal.SIGTERM, None)

        # The in-flight job was re-enqueued and the horse was killed.
        current_job.func.delay.assert_called_once()
        kill.assert_called_once_with(1, signal.SIGKILL)
Ejemplo n.º 49
0
    def test_wrong_genome_version(self):
        """A genome version matching no MANIFEST leaves the database empty."""
        # Immediately validates and creates a sample.
        management.call_command('samples', 'queue')

        # Drain both queues synchronously: variants first, then effects.
        get_worker('variants').work(burst=True)
        get_worker('default').work(burst=True)

        # Since the genome version was required but does not match any of
        # the versions specified in the MANIFESTs, nothing was loaded at
        # any level.
        for model in (Variant, Result, Sample, Cohort, Batch, Project):
            self.assertEqual(model.objects.count(), 0)
Ejemplo n.º 50
0
    def test(self):
        "Load a single VCF, reload the snpEff data using the same VCF."
        management.call_command('samples', 'queue',
                                os.path.join(SAMPLE_DIRS[0], 'batch1/sample1'))

        # Drain both queues synchronously.
        get_worker('variants').work(burst=True)
        get_worker('default').work(burst=True)

        effect_count = 614
        self.assertEqual(VariantEffect.objects.count(), effect_count)
        self.assertEqual(
            VariantEffect.objects.aggregate(max_id=Max('id'))['max_id'],
            effect_count)

        management.call_command('variants', 'reload-snpeff',
                                os.path.join(SAMPLE_DIRS[0],
                                             'batch1/sample1/results.vcf'))

        # The row count is unchanged, but the auto-incremented key has
        # doubled, proving rows were deleted and reloaded.
        self.assertEqual(VariantEffect.objects.count(), effect_count)
        self.assertEqual(
            VariantEffect.objects.aggregate(max_id=Max('id'))['max_id'],
            effect_count * 2)
Ejemplo n.º 51
0
 def test_run_once_job(self):
     """A single-run job with arguments is deleted after it executes."""
     job = Job.objects.get(
         task='django_rq_jobs.tests.tasks.django_arg_check')
     management.call_command('rqjobs')
     get_worker('default').work(burst=True)
     # After its only run the job record must be gone.
     self.assertFalse(Job.objects.filter(pk=job.pk).exists())
Ejemplo n.º 52
0
def run_jobs():
    """Synchronously execute every pending job on the configured queue."""
    worker = get_worker(settings.ASYNC_QUEUE)
    # burst=True: process until the queue is empty, then return.
    worker.work(burst=True)
Ejemplo n.º 53
0
def createWorker():
    """Start a blocking RQ worker on the default django_rq queue."""
    django_rq.get_worker().work()
Ejemplo n.º 54
0
    def test_pipeline(self):
        """Run the full sample pipeline and verify counts at each stage."""
        per_sample = [
            {'batch': 'batch1', 'sample': 'NA12891', 'count': 1963},
            {'batch': 'batch1', 'sample': 'NA12892', 'count': 1963},
            {'batch': 'batch1', 'sample': 'NA12878', 'count': 1963},
            {'batch': 'batch2', 'sample': 'NA12891', 'count': 2094},
            {'batch': 'batch2', 'sample': 'NA12892', 'count': 2094},
            {'batch': 'batch2', 'sample': 'NA12878', 'count': 2094},
        ]
        expected = {
            'batches': 2,
            'cohorts': 2,
            'genes': 65,
            'projects': 1,
            'results_per_sample': per_sample,
            'samples': 6,
            'transcripts': 108,
            'variant_effects': 8788,
            'variants': 4057,
            'samples_per_batch': [(1, 3), (2, 3)],
            'results': sum(entry['count'] for entry in per_sample),
        }

        # Queueing immediately validates and creates the samples.
        management.call_command('samples', 'queue')

        # Workers used below to drain the queues synchronously.
        variants_worker = get_worker('variants')
        effects_worker = get_worker('default')

        # Sample-related records exist as soon as queueing finishes.
        self.assertEqual(Project.objects.count(), expected['projects'])
        self.assertEqual(Batch.objects.count(), expected['batches'])
        self.assertEqual(Sample.objects.count(), expected['samples'])

        # The world cohort plus the project cohort.
        self.assertEqual(Cohort.objects.count(), expected['cohorts'])

        # Nothing has been published yet.
        self.assertEqual(Sample.objects.filter(published=False).count(),
                         expected['samples'])
        self.assertEqual(
            Cohort.objects.filter(count=0, published=False).count(),
            expected['cohorts'])
        self.assertEqual(
            Batch.objects.filter(count=0, published=False).count(),
            expected['batches'])

        # Each sample's manifest was captured and has not drifted.
        self.assertEqual(SampleManifest.objects.count(), expected['samples'])
        for manifest in SampleManifest.objects.all():
            self.assertNotEqual(manifest.content, '')
            self.assertFalse(manifest.content_has_changed())

        # Load variants.
        variants_worker.work(burst=True)

        self.assertEqual(Variant.objects.count(), expected['variants'])

        # Load effects.
        effects_worker.work(burst=True)

        self.assertEqual(Gene.objects.count(), expected['genes'])
        self.assertEqual(Transcript.objects.count(),
                         expected['transcripts'])
        self.assertEqual(VariantEffect.objects.count(),
                         expected['variant_effects'])

        # No annotation data is loaded by this pipeline.
        self.assertEqual(Sift.objects.count(), 0)
        self.assertEqual(PolyPhen2.objects.count(), 0)
        self.assertEqual(ThousandG.objects.count(), 0)
        self.assertEqual(EVS.objects.count(), 0)

        # All per-sample results loaded.
        self.assertEqual(Result.objects.count(), expected['results'])

        # Every batch is now published.
        self.assertEqual(Batch.objects.filter(published=True).count(),
                         expected['batches'])

        # Per-sample published flags and result counts.
        for entry in per_sample:
            sample = Sample.objects.get(name=entry['sample'],
                                        batch__name=entry['batch'])
            self.assertTrue(sample.published)
            self.assertEqual(sample.count, entry['count'])

        # Per-batch published flags and sample counts.
        for pk, count in expected['samples_per_batch']:
            batch = Batch.objects.get(pk=pk)
            self.assertTrue(batch.published)
            self.assertEqual(batch.count, count)

        # Three state transitions were logged for the first sample.
        system = System.get(Sample.objects.all()[0])
        self.assertEqual(len(system), 3)
Ejemplo n.º 55
0
    def test_pipeline(self):
        """End-to-end pipeline run checked against hard-coded counts."""
        # Queueing immediately validates and creates the samples.
        management.call_command('samples', 'queue')

        # Workers used below to drain the queues synchronously.
        variants_worker = get_worker('variants')
        effects_worker = get_worker('default')

        # Sample-related records exist before any worker runs.
        self.assertEqual(Project.objects.count(), 1)
        self.assertEqual(Batch.objects.count(), 4)
        self.assertEqual(Sample.objects.count(), 15)

        # The world cohort plus the project cohort.
        self.assertEqual(Cohort.objects.count(), 2)

        # Nothing has been published yet.
        self.assertEqual(Sample.objects.filter(published=False).count(), 15)
        self.assertEqual(
            Cohort.objects.filter(count=0, published=False).count(), 2)
        self.assertEqual(
            Batch.objects.filter(count=0, published=False).count(), 4)

        # Each sample's manifest was captured and has not drifted.
        self.assertEqual(SampleManifest.objects.count(), 15)
        for manifest in SampleManifest.objects.all():
            self.assertNotEqual(manifest.content, '')
            self.assertFalse(manifest.content_has_changed())

        # Load variants.
        variants_worker.work(burst=True)

        self.assertEqual(Variant.objects.count(), 674)

        # Load effects.
        effects_worker.work(burst=True)

        self.assertEqual(Gene.objects.count(), 104)
        self.assertEqual(Transcript.objects.count(), 255)
        self.assertEqual(VariantEffect.objects.count(), 1418)

        # No annotation data is loaded by this pipeline.
        self.assertEqual(Sift.objects.count(), 0)
        self.assertEqual(PolyPhen2.objects.count(), 0)
        self.assertEqual(ThousandG.objects.count(), 0)
        self.assertEqual(EVS.objects.count(), 0)

        # All per-sample results loaded.
        self.assertEqual(Result.objects.count(), 3436)

        # Only three of the four batches end up published.
        self.assertEqual(Batch.objects.filter(published=True).count(), 3)

        # Per-sample published flags and result counts.
        sample_counts = [(1, 289), (2, 281), (3, 268), (4, 295), (5, 296),
                         (6, 293), (7, 264), (8, 289), (9, 264), (10, 293),
                         (11, 289), (12, 315)]
        for pk, count in sample_counts:
            sample = Sample.objects.get(pk=pk)
            self.assertTrue(sample.published)
            self.assertEqual(sample.count, count)

        # Per-batch published flags and sample counts.
        for pk, count in [(1, 5), (2, 3), (3, 4)]:
            batch = Batch.objects.get(pk=pk)
            self.assertTrue(batch.published)
            self.assertEqual(batch.count, count)

        # Three state transitions were logged for the first sample.
        system = System.get(Sample.objects.all()[0])
        self.assertEqual(len(system), 3)
Ejemplo n.º 56
0
	def process_jobs(self):
		"""Synchronously drain the default RQ queue, then return."""
		from django_rq import get_worker
		worker = get_worker()
		worker.work(burst=True)
Ejemplo n.º 57
0
# Standalone RQ worker entry point for the thresher_backend project.
import os
# Point Django at the project settings before any settings-dependent import.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "thresher_backend.settings")

# Do time consuming Django setup before forking individual worker processes.
import django
django.setup()
from django.conf import settings

from django_rq import get_worker

if __name__ == '__main__':
    # One worker listening on all five project queues.
    # NOTE(review): RQ typically gives earlier-listed queues priority —
    # confirm against the RQ docs if ordering matters here.
    worker = get_worker('task_exporter', 'task_importer', 'file_importer',
                        'nlp_generator', 'nlp_importer')
    # Blocking loop (no burst=True): runs until the process is terminated.
    # NOTE(review): `settings` is imported above but unused in this script.
    worker.work()