Example #1
    def test_pillow_processes_changes(self, find_duplicate_cases_mock):
        kafka_seq = get_topic_offset(topics.FORM_SQL)

        case1 = self.factory.create_case(case_name="foo",
                                         case_type=self.case_type,
                                         update={"age": 2})
        case2 = self.factory.create_case(case_name="foo",
                                         case_type=self.case_type,
                                         update={"age": 2})
        find_duplicate_cases_mock.return_value = [case1.case_id, case2.case_id]

        AutomaticUpdateRule.clear_caches(
            self.domain, AutomaticUpdateRule.WORKFLOW_DEDUPLICATE)

        new_kafka_seq = get_topic_offset(topics.FORM_SQL)
        self.pillow.process_changes(since=kafka_seq, forever=False)

        self._assert_case_duplicate_pair(case1.case_id, [case2.case_id])
        self._assert_case_duplicate_pair(case2.case_id, [case1.case_id])
        self.assertEqual(CaseDuplicate.objects.count(), 2)
        self.assertEqual(
            CommCareCase.objects.get_case(
                case1.case_id, self.domain).get_case_property('age'), '5')
        self.assertEqual(
            CommCareCase.objects.get_case(case1.case_id, self.domain).name,
            'Herman Miller')

        # The new changes present should not be processed by the pillow
        # processor, since they were updates from a duplicate action.
        with patch.object(CaseDeduplicationProcessor,
                          '_process_case_update') as p:
            self.pillow.process_changes(since=new_kafka_seq, forever=False)
            p.assert_not_called()
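Nearly every snippet on this page follows the same pattern: capture the topic offset with get_topic_offset() before the change is published, then replay only what was published after it. A minimal distilled sketch of that pattern (publish_changes is a hypothetical placeholder, not part of any project above):

    from corehq.apps.change_feed import topics
    from corehq.apps.change_feed.topics import get_topic_offset

    def replay_new_changes(pillow, topic=topics.FORM_SQL):
        # capture the offset *before* anything is published...
        kafka_seq = get_topic_offset(topic)
        # ...make whatever writes end up on the topic (hypothetical step)
        publish_changes()
        # ...then process only the changes published after that offset
        pillow.process_changes(since=kafka_seq, forever=False)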
Example #2
 def _get_kafka_seq(self):
     # KafkaChangeFeed listens for multiple topics (form, form-sql) in the case search pillow,
     # so we need to provide a dict of seqs to kafka
     return {
         topics.FORM_SQL: get_topic_offset(topics.FORM_SQL),
         topics.FORM: get_topic_offset(topics.FORM)
     }
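When a pillow consumes several topics, the checkpoint is a dict keyed by topic rather than a single offset, and it is passed to process_changes the same way (compare the since={...} call in Example #23 below). A hedged sketch, assuming a pillow wired to both form topics:

    def replay_form_changes(pillow):
        # one offset per topic, captured before any changes are published
        seq = {
            topics.FORM_SQL: get_topic_offset(topics.FORM_SQL),
            topics.FORM: get_topic_offset(topics.FORM),
        }
        # ... publish form changes here ...
        pillow.process_changes(since=seq, forever=False)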
Example #4
    def test_app_pillow_kafka(self):
        consumer = get_test_kafka_consumer(topics.APP)
        # have to get the seq id before the change is processed
        kafka_seq = get_topic_offset(topics.APP)
        couch_seq = get_current_seq(Application.get_db())

        app_name = 'app-{}'.format(uuid.uuid4().hex)
        app = self._create_app(app_name)

        app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
        app_db_pillow.process_changes(couch_seq, forever=False)

        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(app._id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        app_pillow = get_app_to_elasticsearch_pillow()
        app_pillow.process_changes(since=kafka_seq, forever=False)
        self.es.indices.refresh(APP_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        results = AppES().run()
        self.assertEqual(1, results.total)
        app_doc = results.hits[0]
        self.assertEqual(self.domain, app_doc['domain'])
        self.assertEqual(app['_id'], app_doc['_id'])
        self.assertEqual(app_name, app_doc['name'])
Example #5
    def test_pillow(self):
        from corehq.apps.change_feed.topics import get_topic_offset
        from corehq.pillows.synclog import get_user_sync_history_pillow
        consumer = get_test_kafka_consumer(topics.SYNCLOG_SQL)
        # get the seq id before the change is published
        kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

        # make sure user has empty reporting-metadata before a sync
        self.assertEqual(self.ccuser.reporting_metadata.last_syncs, [])

        # do a sync
        synclog = SyncLog(domain=self.domain.name, user_id=self.ccuser._id,
                          date=datetime.datetime(2015, 7, 1, 0, 0))
        synclog.save()

        # make sure kafka change updates the user with latest sync info
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        synclog = self._get_latest_synclog()
        self.assertEqual(change_meta.document_id, synclog._id)
        self.assertEqual(change_meta.domain, self.domain.name)

        # make sure processor updates the user correctly
        pillow = get_user_sync_history_pillow()
        pillow.process_changes(since=kafka_seq, forever=False)
        ccuser = CommCareUser.get(self.ccuser._id)
        self.assertEqual(len(ccuser.reporting_metadata.last_syncs), 1)
        self.assertEqual(ccuser.reporting_metadata.last_syncs[0].sync_date, synclog.date)
        self.assertEqual(ccuser.reporting_metadata.last_sync_for_user.sync_date, synclog.date)
Example #6
    def test_login_as(self):
        self.web_user.is_superuser = True
        self.web_user.save()

        restore_uri = reverse('ota_restore', args=[self.domain])
        auth_header = _get_auth_header(self.web_user_username,
                                       self.web_user_password)
        client = Client(HTTP_AUTHORIZATION=auth_header)
        device_id = "foo"

        # get the seq id before the change is published
        kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

        resp = client.get(restore_uri,
                          data={
                              'as': self.username,
                              "device_id": device_id
                          },
                          follow=True)
        self.assertEqual(resp.status_code, 200)

        pillow = get_user_sync_history_pillow()
        pillow.process_changes(since=kafka_seq, forever=False)

        restored_as_user = CommCareUser.get_by_username(self.username)

        self.assertEqual(len(restored_as_user.devices), 1)
        self.assertEqual(restored_as_user.devices[0].device_id, device_id)
Example #8
    def test_sql_sms_pillow(self, mock_do_publish):
        mock_do_publish.return_value = True
        consumer = get_test_kafka_consumer(topics.SMS)

        # get the seq id before the change is published
        kafka_seq = get_topic_offset(topics.SMS)

        # create an sms
        self._create_sms()
        sms_json = self._to_json(self.sms_dict, self.sms)

        # test serialization
        self.assertEqual(self.sms.to_json(), sms_json)

        # publish the change and confirm it gets to kafka
        self.sms.publish_change()
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(self.sms.couch_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        sms_pillow = get_sql_sms_pillow('SqlSMSPillow')
        sms_pillow.process_changes(since=kafka_seq, forever=False)
        self.elasticsearch.indices.refresh(SMS_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        results = SMSES().run()
        self.assertEqual(1, results.total)
        sms_doc = results.hits[0]
        self.assertEqual(sms_doc, sms_json)
Example #9
    def test_sql_sms_pillow(self, mock_do_publish):
        mock_do_publish.return_value = True
        consumer = get_test_kafka_consumer(topics.SMS)

        # get the seq id before the change is published
        kafka_seq = get_topic_offset(topics.SMS)

        # create an sms
        sms_and_dict = create_fake_sms(self.domain)
        self.sms = sms_and_dict.sms
        sms_json = self._to_json(sms_and_dict.sms_dict, self.sms)

        # test serialization
        self.assertEqual(self.sms.to_json(), sms_json)

        # publish the change and confirm it gets to kafka
        self.sms.publish_change()
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(self.sms.couch_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        sms_pillow = get_sql_sms_pillow('SqlSMSPillow')
        sms_pillow.process_changes(since=kafka_seq, forever=False)
        self.elasticsearch.indices.refresh(SMS_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        results = SMSES().run()
        self.assertEqual(1, results.total)
        sms_doc = results.hits[0]
        self.assertEqual(sms_doc, sms_json)
Example #10
    def test_ledger_pillow(self):
        factory = CaseFactory(domain=self.domain)
        case = factory.create_case()

        consumer = get_test_kafka_consumer(topics.LEDGER)
        # have to get the seq id before the change is processed
        kafka_seq = get_topic_offset(topics.LEDGER)

        from corehq.apps.commtrack.tests.util import get_single_balance_block
        from corehq.apps.hqcase.utils import submit_case_blocks
        submit_case_blocks(
            [get_single_balance_block(case.case_id, self.product_id, 100)],
            self.domain)

        ref = UniqueLedgerReference(case.case_id, 'stock', self.product_id)
        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        if should_use_sql_backend(self.domain):
            self.assertEqual(ref.as_id(), change_meta.document_id)
        else:
            from corehq.apps.commtrack.models import StockState
            state = StockState.objects.all()
            self.assertEqual(1, len(state))
            self.assertEqual(state[0].pk, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        self.pillow.process_changes(since=kafka_seq, forever=False)
        self.elasticsearch.indices.refresh(LEDGER_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        self._assert_ledger_in_es(ref)
Example #11
    def test_ledger_pillow(self):
        factory = CaseFactory(domain=self.domain)
        case = factory.create_case()

        consumer = get_test_kafka_consumer(topics.LEDGER)
        # have to get the seq id before the change is processed
        kafka_seq = get_topic_offset(topics.LEDGER)

        from corehq.apps.commtrack.tests.util import get_single_balance_block
        from corehq.apps.hqcase.utils import submit_case_blocks
        submit_case_blocks([
            get_single_balance_block(case.case_id, self.product_id, 100)],
            self.domain
        )

        ref = UniqueLedgerReference(case.case_id, 'stock', self.product_id)
        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        if should_use_sql_backend(self.domain):
            self.assertEqual(ref.as_id(), change_meta.document_id)
        else:
            from corehq.apps.commtrack.models import StockState
            state = StockState.objects.all()
            self.assertEqual(1, len(state))
            self.assertEqual(state[0].pk, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        self.pillow.process_changes(since=kafka_seq, forever=False)
        self.elasticsearch.indices.refresh(LEDGER_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        self._assert_ledger_in_es(ref)
Example #13
    def test_prune_autogenerated_builds(self):
        kafka_seq = get_topic_offset(topics.APP)
        couch_seq = get_current_seq(Application.get_db())
        # Build #1, manually generated
        app = self._create_app('test-prune-app')
        build1 = app.make_build()
        build1.save()
        self.assertFalse(build1.is_auto_generated)

        # Build #2, auto-generated
        app.save()
        build2 = make_async_build(app, 'someone')

        # Build #3, manually generated
        app.save()
        build3 = app.make_build()
        build3.save()

        # All 3 builds should show up in ES
        self.refresh_elasticsearch(kafka_seq, couch_seq)
        build_ids_in_es = AppES().domain(self.domain).is_build().values_list('_id', flat=True)
        self.assertCountEqual(build_ids_in_es, [build1._id, build2._id, build3._id])

        # prune, which should delete the autogenerated build
        prune_auto_generated_builds(self.domain, app.id)

        # Build2 should no longer be in ES
        self.refresh_elasticsearch(kafka_seq, couch_seq)
        build_ids_in_es = AppES().domain(self.domain).is_build().values_list('_id', flat=True)
        self.assertCountEqual(build_ids_in_es, [build1._id, build3._id])
Example #14
    def test_prune_autogenerated_builds(self):
        kafka_seq = get_topic_offset(topics.APP)
        couch_seq = get_current_seq(Application.get_db())
        # Build #1, manually generated
        app = self._create_app('test-prune-app')
        build1 = app.make_build()
        build1.save()
        self.assertFalse(build1.is_auto_generated)

        # Build #2, auto-generated
        app.save()
        autogenerate_build(app, 'username')

        # Build #3, manually generated
        app.save()
        build3 = app.make_build()
        build3.save()

        # All 3 builds should show up in ES
        self.refresh_elasticsearch(kafka_seq, couch_seq)
        build_ids_in_es = AppES().domain(self.domain).is_build().values_list(
            '_id', flat=True)
        self.assertEqual(len(build_ids_in_es), 3)

        # prune, which should delete the autogenerated build
        prune_auto_generated_builds(self.domain, app.id)

        # Build2 should no longer be in ES
        self.refresh_elasticsearch(kafka_seq, couch_seq)
        build_ids_in_es = AppES().domain(self.domain).is_build().values_list(
            '_id', flat=True)
        self.assertCountEqual(build_ids_in_es, [build1._id, build3._id])
Example #15
    def test_hard_delete_app(self):
        consumer = get_test_kafka_consumer(topics.APP)
        # have to get the seq id before the change is processed
        kafka_seq = get_topic_offset(topics.APP)
        couch_seq = get_current_seq(Application.get_db())

        app = self._create_app('test_hard_deleted_app', cleanup=False)
        app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
        app_db_pillow.process_changes(couch_seq, forever=False)

        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(app._id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        app_pillow = get_app_to_elasticsearch_pillow()
        app_pillow.process_changes(since=kafka_seq, forever=False)
        self.es.indices.refresh(APP_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        results = AppES().run()
        self.assertEqual(1, results.total)

        couch_seq = get_current_seq(Application.get_db())
        kafka_seq = get_topic_offset(topics.APP)

        app.delete()
        app_db_pillow.process_changes(couch_seq, forever=False)

        # confirm change made it to kafka. Would raise StopIteration otherwise
        next(consumer)

        # send to elasticsearch
        app_pillow = get_app_to_elasticsearch_pillow()
        app_pillow.process_changes(since=kafka_seq, forever=False)
        self.es.indices.refresh(APP_INDEX_INFO.index)

        # confirm deletion made it to elasticsearch
        results = AppES().run()
        self.assertEqual(0, results.total)
Example #16
    def test_ledger_pillow_sql(self):
        factory = CaseFactory(domain=self.domain)
        case = factory.create_case()

        consumer = get_test_kafka_consumer(topics.LEDGER)
        # have to get the seq id before the change is processed
        kafka_seq = get_topic_offset(topics.LEDGER)

        from corehq.apps.commtrack.tests.util import get_single_balance_block
        from corehq.apps.hqcase.utils import submit_case_blocks
        submit_case_blocks([
            get_single_balance_block(case.case_id, self.product_id, 100)],
            self.domain
        )

        ref = UniqueLedgerReference(case.case_id, 'stock', self.product_id)
        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        if should_use_sql_backend(self.domain):
            self.assertEqual(ref.as_id(), change_meta.document_id)
        else:
            from corehq.apps.commtrack.models import StockState
            state = StockState.objects.all()
            self.assertEqual(1, len(state))
            self.assertEqual(state[0].pk, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        self.pillow.process_changes(since=kafka_seq, forever=False)
        self.elasticsearch.indices.refresh(LEDGER_INDEX_INFO.index)

        # confirm change made it to elasticsearch
        results = self.elasticsearch.search(
            LEDGER_INDEX_INFO.index,
            LEDGER_INDEX_INFO.type, body={
                "query": {
                    "bool": {
                        "must": [{
                            "match_all": {}
                        }]
                    }
                }
            }
        )
        self.assertEqual(1, results['hits']['total'])
        ledger_doc = results['hits']['hits'][0]['_source']
        self.assertEqual(self.domain, ledger_doc['domain'])
        self.assertEqual(ref.case_id, ledger_doc['case_id'])
        self.assertEqual(ref.section_id, ledger_doc['section_id'])
        self.assertEqual(ref.entry_id, ledger_doc['entry_id'])
Example #17
    def _make_and_test_user_kafka_pillow(self, username):
        # make a user
        user = CommCareUser.create(TEST_DOMAIN, username, 'secret', None, None)

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow_old()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self._verify_user_in_es(username)
        return user
Example #18
    def test_kafka_user_pillow_deletion(self):
        user = self._make_and_test_user_kafka_pillow('test-kafka-user_deletion')
        # soft delete
        user.retire()

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow_old(skip_ucr=True)
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self.assertEqual(0, UserES().run().total)
Example #20
    def _make_and_test_user_kafka_pillow(self, username):
        # make a user
        user = CommCareUser.create(TEST_DOMAIN, username, 'secret')

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow_old()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self._verify_user_in_es(username)
        return user
Example #21
    def test_pillow_deletion(self):
        user_id, group = self.test_pillow()
        group.soft_delete()

        # send to kafka
        since = get_topic_offset(topics.GROUP)
        producer.send_change(topics.GROUP, _group_to_change_meta(group.to_json()))

        pillow = get_group_pillow()
        pillow.process_changes(since=since, forever=False)

        # confirm removed in elasticsearch
        self.es_client.indices.refresh(USER_INDEX)
        _assert_es_user_and_groups(self, self.es_client, user_id, [], [])
Example #22
    def test_reverted_domain_pillow_deletion(self):
        domain_name = 'domain-pillow-delete'
        with drop_connected_signals(commcare_domain_post_save):
            domain = create_domain(domain_name)

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        publish_domain_saved(domain)

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # verify there
        self._verify_domain_in_es(domain_name)

        domain_obj = Domain.get_by_name(domain_name)
        domain_obj.doc_type = 'Domain-DUPLICATE'
        domain_obj.save()

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        publish_domain_saved(domain_obj)

        # undelete
        domain_obj = Domain.get_by_name(domain_name)
        domain_obj.doc_type = 'Domain'
        domain_obj.save()

        # process pillow changes
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # confirm domain still exists
        self._verify_domain_in_es(domain_name)
Example #23
    def test_kafka_user_pillow_deletion(self):
        user = self._make_and_test_user_kafka_pillow('test-kafka-user_deletion')
        # soft delete
        user.doc_type = '{}{}'.format(user.doc_type, DELETED_SUFFIX)
        user.save()

        # send to kafka
        since = get_topic_offset(document_types.COMMCARE_USER)
        producer.send_change(document_types.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow()
        pillow.process_changes(since={document_types.COMMCARE_USER: since}, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self.assertEqual(0, UserES().run().total)
Example #24
    def test_kafka_user_pillow_deletion(self):
        user = self._make_and_test_user_kafka_pillow(
            'test-kafka-user_deletion')
        # soft delete
        user.doc_type = '{}{}'.format(user.doc_type, DELETED_SUFFIX)
        user.save()

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self.assertEqual(0, UserES().run().total)
Example #25
    def test_kafka_domain_pillow_deletions(self):
        # run the other test to ensure domain is created and in ES
        self.test_kafka_domain_pillow()
        domain_obj = Domain.get_by_name('domain-pillowtest-kafka')
        domain_obj.doc_type = 'Domain-DUPLICATE'

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain_obj))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # ensure removed from ES
        self.assertEqual(0, DomainES().run().total)
Example #26
    def test_kafka_domain_pillow(self):
        # make a domain
        domain_name = 'domain-pillowtest-kafka'
        with drop_connected_signals(commcare_domain_post_save):
            domain = create_domain(domain_name)

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # verify there
        self._verify_domain_in_es(domain_name)
Example #27
    def test_kafka_domain_pillow_deletions(self):
        # run the other test to ensure domain is created and in ES
        self.test_kafka_domain_pillow()
        domain_obj = Domain.get_by_name('domain-pillowtest-kafka')
        domain_obj.doc_type = 'Domain-DUPLICATE'

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain_obj))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # ensure removed from ES
        self.assertEqual(0, DomainES().run().total)
Example #28
    def test_kafka_domain_pillow(self):
        # make a domain
        domain_name = 'domain-pillowtest-kafka'
        with drop_connected_signals(commcare_domain_post_save):
            domain = create_domain(domain_name)

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # verify there
        self._verify_domain_in_es(domain_name)
Example #29
    def test_pillow_form_processed(self):
        from corehq.apps.change_feed.topics import get_topic_offset
        from corehq.pillows.synclog import get_user_sync_history_pillow

        self.assertEqual(UserReportingMetadataStaging.objects.count(), 0)
        UserReportingMetadataStaging.add_submission(
            self.domain.name, self.ccuser._id, '123', None, None, {}, datetime.datetime.utcnow()
        )
        self.assertEqual(UserReportingMetadataStaging.objects.count(), 1)

        consumer = get_test_kafka_consumer(topics.SYNCLOG_SQL)
        # get the seq id before the change is published
        kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

        # make sure user has empty reporting-metadata before a sync
        ccuser = CommCareUser.get(self.ccuser._id)
        self.assertEqual(ccuser.reporting_metadata.last_syncs, [])

        # do a sync
        synclog = SimplifiedSyncLog(domain=self.domain.name, user_id=self.ccuser._id,
                                    date=datetime.datetime(2015, 7, 1, 0, 0), app_id='123')
        synclog.save()

        # make sure kafka change updates the user with latest sync info
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        synclog = self._get_latest_synclog()
        self.assertEqual(change_meta.document_id, synclog._id)
        self.assertEqual(change_meta.domain, self.domain.name)

        self.assertEqual(UserReportingMetadataStaging.objects.count(), 1)

        # make sure processor updates the user correctly
        pillow = get_user_sync_history_pillow()
        pillow.process_changes(since=kafka_seq)
        process_reporting_metadata_staging()
        ccuser = CommCareUser.get(self.ccuser._id)
        self.assertEqual(len(ccuser.reporting_metadata.last_syncs), 1)
        self.assertEqual(ccuser.reporting_metadata.last_syncs[0].sync_date, synclog.date)
        self.assertEqual(ccuser.reporting_metadata.last_sync_for_user.sync_date, synclog.date)
Example #30
    def test_pillow(self):
        user_id = uuid.uuid4().hex
        domain = 'dbtest-group-user'
        _create_es_user(self.es_client, user_id, domain)
        _assert_es_user_and_groups(self, self.es_client, user_id, None, None)

        # create and save a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_topic_offset(topics.GROUP)
        producer.send_change(topics.GROUP, _group_to_change_meta(group.to_json()))

        # process using pillow
        pillow = get_group_pillow()
        pillow.process_changes(since=since, forever=False)

        # confirm updated in elasticsearch
        self.es_client.indices.refresh(USER_INDEX)
        _assert_es_user_and_groups(self, self.es_client, user_id, [group._id], [group.name])
        return user_id, group
Example #31
    def test_kafka_group_pillow(self):
        domain = uuid.uuid4().hex
        user_id = uuid.uuid4().hex

        # make a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_topic_offset(topics.GROUP)
        change_meta = change_meta_from_doc(
            document=group.to_json(),
            data_source_type=data_sources.SOURCE_COUCH,
            data_source_name=Group.get_db().dbname,
        )
        producer.send_change(topics.GROUP, change_meta)

        # send to elasticsearch
        pillow = get_group_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(GROUP_INDEX_INFO.index)

        # verify there
        self._verify_group_in_es(group)
Example #32
    def test_kafka_group_pillow(self):
        domain = uuid.uuid4().hex
        user_id = uuid.uuid4().hex

        # make a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_topic_offset(GROUP)
        change_meta = change_meta_from_doc(
            document=group.to_json(),
            data_source_type=data_sources.COUCH,
            data_source_name=Group.get_db().dbname,
        )
        producer.send_change(GROUP, change_meta)

        # send to elasticsearch
        pillow = get_group_pillow()
        pillow.process_changes(since={GROUP: since}, forever=False)
        self.elasticsearch.indices.refresh(GROUP_INDEX_INFO.index)

        # verify there
        self._verify_group_in_es(group)
Example #33
 def get_latest_change_id(self):
     topic = self._get_single_topic_or_fail()
     return get_topic_offset(topic)
Example #34
 def get_latest_offsets_as_checkpoint_value(self):
     try:
         topic = self._get_single_topic_or_fail()
         return str(get_topic_offset(topic))
     except ValueError:
         return self.get_latest_offsets()
Example #35
 def __enter__(self):
     if len(self.topics) == 1:
         self.kafka_seq = get_topic_offset(self.topics[0])
     else:
         self.kafka_seq = get_multi_topic_offset(self.topics)
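Example #35 shows only the __enter__ half of a context-manager helper. A plausible __exit__ counterpart would replay whatever was published inside the with block; the pillow attribute below is an assumption for illustration, not part of the original helper:

 def __exit__(self, exc_type, exc_value, traceback):
     # hypothetical counterpart: process everything published after the
     # offset(s) captured in __enter__
     self.pillow.process_changes(since=self.kafka_seq, forever=False)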
Example #37
 def _get_kafka_seq(self):
     return get_topic_offset(topics.FORM_SQL)
Example #38
 def test_no_changes(self, mock_iter, mock_update):
     since = get_topic_offset(topics.DOMAIN)
     pillow = get_domain_kafka_to_elasticsearch_pillow()
     pillow.process_changes(since=since, forever=True)
     self.assertFalse(mock_update.called)