Example 1
def publish_ledger_v2_deleted(domain, case_id, section_id, entry_id):
    from corehq.form_processor.parsers.ledgers.helpers import UniqueLedgerReference
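    # Build the unique (case, section, entry) reference for the ledger and
    # publish it to the ledger topic; the trailing True presumably flags the
    # change as a deletion.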
    ref = UniqueLedgerReference(case_id=case_id,
                                section_id=section_id,
                                entry_id=entry_id)
    producer.send_change(topics.LEDGER,
                         change_meta_from_ledger_v2(ref, domain, True))
Example 2
    def test_unknown_user_pillow(self):
        FormProcessorTestUtils.delete_all_xforms()
        user_id = 'test-unknown-user'
        metadata = TestFormMetadata(domain=TEST_DOMAIN, user_id=user_id)
        form = get_form_ready_to_save(metadata)
        FormProcessorInterface(domain=TEST_DOMAIN).save_processed_models([form])

        # send to kafka
        topic = topics.FORM_SQL if settings.TESTS_SHOULD_USE_SQL_BACKEND else topics.FORM
        since = self._get_kafka_seq()
        producer.send_change(topic, _form_to_change_meta(form))

        # send to elasticsearch
        pillow = get_unknown_users_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # the default query doesn't include unknown users so should have no results
        self.assertEqual(0, UserES().run().total)
        # clear the default filters which hide unknown users
        user_es = UserES().remove_default_filters()
        results = user_es.run()
        self.assertEqual(1, results.total)
        user_doc = results.hits[0]
        self.assertEqual(TEST_DOMAIN, user_doc['domain'])
        self.assertEqual(user_id, user_doc['_id'])
        self.assertEqual('UnknownUser', user_doc['doc_type'])
Example 3
    def handle(self, stale_data_in_es_file, delimiter, skip_domains, *args,
               **options):
        changes = _iter_changes(stale_data_in_es_file,
                                skip_domains,
                                delimiter=delimiter)
        for topic, meta in changes:
            producer.send_change(topic, meta)
Example 4
def publish_case_saved(case, send_post_save_signal=True):
    """
    Publish the change to kafka and run case post-save signals.
    """
    producer.send_change(topics.CASE_SQL, change_meta_from_sql_case(case))
    if send_post_save_signal:
        sql_case_post_save.send(case.__class__, case=case)
Example 5
    def test_unknown_user_pillow(self):
        FormProcessorTestUtils.delete_all_xforms()
        user_id = 'test-unknown-user'
        metadata = TestFormMetadata(domain=TEST_DOMAIN, user_id=user_id)
        form = get_form_ready_to_save(metadata)
        FormProcessorInterface(domain=TEST_DOMAIN).save_processed_models(
            [form])

        # send to kafka
        topic = topics.FORM_SQL if settings.TESTS_SHOULD_USE_SQL_BACKEND else topics.FORM
        since = self._get_kafka_seq()
        producer.send_change(topic, _form_to_change_meta(form))

        # send to elasticsearch
        pillow = get_xform_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # the default query doesn't include unknown users so should have no results
        self.assertEqual(0, UserES().run().total)
        # clear the default filters which hide unknown users
        user_es = UserES().remove_default_filters()
        results = user_es.run()
        self.assertEqual(1, results.total)
        user_doc = results.hits[0]
        self.assertEqual(TEST_DOMAIN, user_doc['domain'])
        self.assertEqual(user_id, user_doc['_id'])
        self.assertEqual('UnknownUser', user_doc['doc_type'])
Example 6
    def test_xform_pillow_couch(self):
        form = self._make_form()
        kafka_seq = self._get_kafka_seq()
        producer.send_change(topics.FORM, doc_to_change(form.to_json()).metadata)
        self.assertFalse(self.app.has_submissions)

        self.pillow.process_changes(since=kafka_seq, forever=False)
        self.assertTrue(Application.get(self.app._id).has_submissions)
Example 8
    def handle(self, pillow, **options):
        self.pillow = pillow
        for errors in self.get_next_errors():
            for error in errors:
                if error.change_object.metadata:
                    producer.send_change(
                        error.change_object.metadata.data_source_type,
                        error.change_object.metadata)
Example 9
def publish_case_deleted(domain, case_id):
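    # Only the bare ids are available once a case is deleted, so the
    # ChangeMeta is built by hand, with is_deletion=True marking the change.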
    producer.send_change(topics.CASE_SQL, ChangeMeta(
        document_id=case_id,
        data_source_type=data_sources.CASE_SQL,
        data_source_name='case-sql',  # todo: this isn't really needed.
        document_type='CommCareCase-Deleted',
        domain=domain,
        is_deletion=True,
    ))
Example 10
def publish_form_deleted(domain, form_id):
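    # Mirrors publish_case_deleted: a hand-built ChangeMeta with
    # is_deletion=True, sent to the form topic instead.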
    producer.send_change(topics.FORM_SQL, ChangeMeta(
        document_id=form_id,
        data_source_type=data_sources.FORM_SQL,
        data_source_name='form-sql',
        document_type='XFormInstance-Deleted',
        domain=domain,
        is_deletion=True,
    ))
Example 11
def _publish_ledgers_from_form(domain, form):
    from corehq.form_processor.parsers.ledgers.form import get_all_stock_report_helpers_from_form
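    # The set comprehension dedupes ledger references that appear in more
    # than one transaction, so each ledger is published exactly once.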
    unique_references = {
        transaction.ledger_reference
        for helper in get_all_stock_report_helpers_from_form(form)
        for transaction in helper.transactions
    }
    for ledger_reference in unique_references:
        producer.send_change(topics.LEDGER, _change_meta_from_ledger_reference(domain, ledger_reference))
Example 12
def process_pillow_retry(error_doc):
    pillow_name_or_class = error_doc.pillow
    try:
        pillow = get_pillow_by_name(pillow_name_or_class)
    except PillowNotFoundError:
        pillow = None

    if not pillow:
        notify_error((
            "Could not find pillowtop class '%s' while attempting a retry. "
            "If this pillow was recently deleted then this will be automatically cleaned up eventually. "
            "If not, then this should be looked into."
        ) % pillow_name_or_class)
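        # the return in the finally block below swallows any exception
        # raised while saving the error doc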
        try:
            error_doc.total_attempts = const.PILLOW_RETRY_MULTI_ATTEMPTS_CUTOFF + 1
            error_doc.save()
        finally:
            return

    change = error_doc.change_object
    delete_all_for_doc = False
    try:
        change_metadata = change.metadata
        if change_metadata:
            document_store = get_document_store(
                data_source_type=change_metadata.data_source_type,
                data_source_name=change_metadata.data_source_name,
                domain=change_metadata.domain,
                load_source="pillow_retry",
            )
            change.document_store = document_store
        if isinstance(pillow.get_change_feed(), CouchChangeFeed):
            pillow.process_change(change)
        else:
            if change_metadata.data_source_type in ('couch', 'sql'):
                data_source_type = change_metadata.data_source_type
            else:
                # legacy metadata will have other values for non-sql
                # can remove this once all legacy errors have been processed
                data_source_type = 'sql'
            producer.send_change(
                get_topic_for_doc_type(
                    change_metadata.document_type,
                    data_source_type
                ),
                change_metadata
            )
            delete_all_for_doc = True
    except Exception:
        ex_type, ex_value, ex_tb = sys.exc_info()
        error_doc.add_attempt(ex_value, ex_tb)
        error_doc.save()
    else:
        if delete_all_for_doc:
            PillowError.objects.filter(doc_id=error_doc.doc_id).delete()
        else:
            error_doc.delete()
Example 16
def publish_case_deleted(domain, case_id):
    producer.send_change(topics.CASE_SQL, ChangeMeta(
        document_id=case_id,
        data_source_type=data_sources.SOURCE_SQL,
        data_source_name=data_sources.CASE_SQL,
        document_type='CommCareCase-Deleted',
        domain=domain,
        is_deletion=True,
    ))
Example 18
def publish_location_saved(domain, location_id, is_deletion=False):
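    # A single publisher covers both saves and deletions; callers pass
    # is_deletion=True when the location was removed.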
    change_meta = ChangeMeta(
        document_id=location_id,
        data_source_type='location',
        data_source_name='location',
        document_type=LOCATION_DOC_TYPE,
        domain=domain,
        is_deletion=is_deletion,
    )
    producer.send_change(topics.LOCATION, change_meta)
Example 20
def publish_case_deleted(domain, case_id):
    producer.send_change(topics.CASE_SQL, ChangeMeta(
        document_id=case_id,
        data_source_type=data_sources.CASE_SQL,
        data_source_name='case-sql',  # todo: this isn't really needed.
        document_type='CommCareCaseSql',  # todo: should this be the same as the couch models?
        document_subtype=None,  # todo: does this need a value?
        domain=domain,
        is_deletion=True,
    ))
Example 21
def publish_location_saved(domain, location_id, is_deletion=False):
    from corehq.apps.change_feed import data_sources
    change_meta = ChangeMeta(
        document_id=location_id,
        data_source_type=data_sources.SOURCE_SQL,
        data_source_name=data_sources.LOCATION,
        document_type=LOCATION_DOC_TYPE,
        domain=domain,
        is_deletion=is_deletion,
    )
    producer.send_change(topics.LOCATION, change_meta)
Example 22
def publish_case_deleted(domain, case_id):
    producer.send_change(
        topics.CASE_SQL,
        ChangeMeta(
            document_id=case_id,
            data_source_type=data_sources.SOURCE_SQL,
            data_source_name=data_sources.CASE_SQL,
            document_type='CommCareCase-Deleted',
            domain=domain,
            is_deletion=True,
        ))
Example 24
    def test_kafka_user_pillow_deletion(self):
        user = self._make_and_test_user_kafka_pillow('test-kafka-user_deletion')
        # soft delete
        user.retire()

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow_old(skip_ucr=True)
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self.assertEqual(0, UserES().run().total)
Example 25
    def test_pillow_deletion(self):
        user_id, group = self.test_pillow()
        group.soft_delete()

        # send to kafka
        since = get_topic_offset(topics.GROUP)
        producer.send_change(topics.GROUP, _group_to_change_meta(group.to_json()))

        pillow = get_group_pillow()
        pillow.process_changes(since=since, forever=False)

        # confirm removed in elasticsearch
        self.es_client.indices.refresh(USER_INDEX)
        _assert_es_user_and_groups(self, self.es_client, user_id, [], [])
Example 26
    def _make_and_test_user_kafka_pillow(self, username):
        # make a user
        user = CommCareUser.create(TEST_DOMAIN, username, 'secret')

        # send to kafka
        since = get_current_kafka_seq(document_types.COMMCARE_USER)
        producer.send_change(document_types.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow()
        pillow.process_changes(since={document_types.COMMCARE_USER: since}, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self._verify_user_in_es(username)
        return user
Example 27
    def _make_and_test_user_kafka_pillow(self, username):
        # make a user
        user = CommCareUser.create(TEST_DOMAIN, username, 'secret', None, None)

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow_old()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self._verify_user_in_es(username)
        return user
Example 28
    def test_pillow_deletion(self):
        user_id, group = self.test_pillow()
        group.soft_delete()

        # send to kafka
        since = get_current_kafka_seq(GROUP)
        producer.send_change(GROUP, _group_to_change_meta(group.to_json()))

        pillow = get_group_to_user_pillow()
        pillow.process_changes(since=since, forever=False)

        # confirm removed in elasticsearch
        self.es_client.indices.refresh(USER_INDEX)
        _assert_es_user_and_groups(self, self.es_client, user_id, [], [])
Example 30
    def test_kafka_user_pillow_deletion(self):
        user = self._make_and_test_user_kafka_pillow('test-kafka-user_deletion')
        # soft delete
        user.doc_type = '{}{}'.format(user.doc_type, DELETED_SUFFIX)
        user.save()

        # send to kafka
        since = get_current_kafka_seq(document_types.COMMCARE_USER)
        producer.send_change(document_types.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow()
        pillow.process_changes(since={document_types.COMMCARE_USER: since}, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self.assertEqual(0, UserES().run().total)
Example 31
def _publish_ledgers_from_form(domain, form):
    from corehq.form_processor.parsers.ledgers.form import get_all_stock_report_helpers_from_form
    unique_references = {
        transaction.ledger_reference
        for helper in get_all_stock_report_helpers_from_form(form)
        for transaction in helper.transactions
    }
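    # The form's backend determines which ledger data source the change is
    # attributed to: SQL-backed forms map to LEDGER_V2, Couch-backed forms
    # to LEDGER_V1.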
    if form.to_json()['backend_id'] == 'sql':
        data_source_name = data_sources.LEDGER_V2
    else:
        data_source_name = data_sources.LEDGER_V1
    for ledger_reference in unique_references:
        meta = _change_meta_from_ledger_reference(domain, ledger_reference,
                                                  data_source_name)
        producer.send_change(topics.LEDGER, meta)
Example 32
    def test_kafka_user_pillow_deletion(self):
        user = self._make_and_test_user_kafka_pillow(
            'test-kafka-user_deletion')
        # soft delete
        user.doc_type = '{}{}'.format(user.doc_type, DELETED_SUFFIX)
        user.save()

        # send to kafka
        since = get_topic_offset(topics.COMMCARE_USER)
        producer.send_change(topics.COMMCARE_USER, _user_to_change_meta(user))

        # send to elasticsearch
        pillow = get_user_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)
        self.assertEqual(0, UserES().run().total)
Example 33
    def test_kafka_domain_pillow_deletions(self):
        # run the other test to ensure domain is created and in ES
        self.test_kafka_domain_pillow()
        domain_obj = Domain.get_by_name('domain-pillowtest-kafka')
        domain_obj.doc_type = 'Domain-DUPLICATE'

        # send to kafka
        since = get_current_kafka_seq(document_types.DOMAIN)
        producer.send_change(document_types.DOMAIN, _domain_to_change_meta(domain_obj))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since={document_types.DOMAIN: since}, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # ensure removed from ES
        self.assertEqual(0, DomainES().run().total)
Example 34
    def _test_process_doc_from_couch(self, datetime_mock, pillow):
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)

        # make sure case is in DB
        case = CommCareCase.wrap(sample_doc)
        with drop_connected_signals(case_post_save):
            case.save()

        # send to kafka
        since = self.pillow.get_change_feed().get_latest_offsets()
        producer.send_change(topics.CASE, doc_to_change(sample_doc).metadata)

        # run pillow and check changes
        pillow.process_changes(since=since, forever=False)
        self._check_sample_doc_state(expected_indicators)
        case.delete()
Example 35
    def test_kafka_domain_pillow_deletions(self):
        # run the other test to ensure domain is created and in ES
        self.test_kafka_domain_pillow()
        domain_obj = Domain.get_by_name('domain-pillowtest-kafka')
        domain_obj.doc_type = 'Domain-DUPLICATE'

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain_obj))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # ensure removed from ES
        self.assertEqual(0, DomainES().run().total)
Example 36
    def test_kafka_domain_pillow(self):
        # make a domain
        domain_name = 'domain-pillowtest-kafka'
        with drop_connected_signals(commcare_domain_post_save):
            domain = create_domain(domain_name)

        # send to kafka
        since = get_topic_offset(topics.DOMAIN)
        producer.send_change(topics.DOMAIN, _domain_to_change_meta(domain))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # verify there
        self._verify_domain_in_es(domain_name)
Example 37
    def test_kafka_domain_pillow(self):
        # make a domain
        domain_name = 'domain-pillowtest-kafka'
        with drop_connected_signals(commcare_domain_post_save):
            domain = create_domain(domain_name)

        # send to kafka
        since = get_current_kafka_seq(document_types.DOMAIN)
        producer.send_change(document_types.DOMAIN, _domain_to_change_meta(domain))

        # send to elasticsearch
        pillow = get_domain_kafka_to_elasticsearch_pillow()
        pillow.process_changes(since={document_types.DOMAIN: since}, forever=False)
        self.elasticsearch.indices.refresh(self.index_info.index)

        # verify there
        self._verify_domain_in_es(domain_name)
Example 38
    def test_case_search_pillow(self):
        consumer = get_test_kafka_consumer(topics.CASE)
        kafka_seq = self._get_kafka_seq()

        case = self._make_case(case_properties={'foo': 'bar'})
        producer.send_change(topics.CASE, doc_to_change(case.to_json()).metadata)
        # confirm change made it to kafka
        message = next(consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(case.case_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # enable case search for domain
        with patch('corehq.pillows.case_search.domain_needs_search_index',
                   new=MagicMock(return_value=True)) as fake_case_search_enabled_for_domain:
            # send to elasticsearch
            self.pillow.process_changes(since=kafka_seq, forever=False)
            fake_case_search_enabled_for_domain.assert_called_with(self.domain)

        self._assert_case_in_es(self.domain, case)
Example 39
    def test_pillow(self):
        user_id = uuid.uuid4().hex
        domain = 'dbtest-group-user'
        _create_es_user(self.es_client, user_id, domain)
        _assert_es_user_and_groups(self, self.es_client, user_id, None, None)

        # create and save a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_topic_offset(topics.GROUP)
        producer.send_change(topics.GROUP, _group_to_change_meta(group.to_json()))

        # process using pillow
        pillow = get_group_pillow()
        pillow.process_changes(since=since, forever=False)

        # confirm updated in elasticsearch
        self.es_client.indices.refresh(USER_INDEX)
        _assert_es_user_and_groups(self, self.es_client, user_id, [group._id], [group.name])
        return user_id, group
Example 40
    def test_pillow(self):
        user_id = uuid.uuid4().hex
        domain = 'dbtest-group-user'
        _create_es_user(self.es_client, user_id, domain)
        _assert_es_user_and_groups(self, self.es_client, user_id, None, None)

        # create and save a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_current_kafka_seq(GROUP)
        producer.send_change(GROUP, _group_to_change_meta(group.to_json()))

        # process using pillow
        pillow = get_group_to_user_pillow()
        pillow.process_changes(since=since, forever=False)

        # confirm updated in elasticsearch
        self.es_client.indices.refresh(USER_INDEX)
        _assert_es_user_and_groups(self, self.es_client, user_id, [group._id], [group.name])
        return user_id, group
Example 41
    def test_kafka_group_pillow(self):
        domain = uuid.uuid4().hex
        user_id = uuid.uuid4().hex

        # make a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_topic_offset(GROUP)
        change_meta = change_meta_from_doc(
            document=group.to_json(),
            data_source_type=data_sources.COUCH,
            data_source_name=Group.get_db().dbname,
        )
        producer.send_change(GROUP, change_meta)

        # send to elasticsearch
        pillow = get_group_pillow()
        pillow.process_changes(since={GROUP: since}, forever=False)
        self.elasticsearch.indices.refresh(GROUP_INDEX_INFO.index)

        # verify there
        self._verify_group_in_es(group)
Example 42
    def test_kafka_group_pillow(self):
        domain = uuid.uuid4().hex
        user_id = uuid.uuid4().hex

        # make a group
        group = Group(domain=domain, name='g1', users=[user_id])
        group.save()

        # send to kafka
        since = get_topic_offset(topics.GROUP)
        change_meta = change_meta_from_doc(
            document=group.to_json(),
            data_source_type=data_sources.SOURCE_COUCH,
            data_source_name=Group.get_db().dbname,
        )
        producer.send_change(topics.GROUP, change_meta)

        # send to elasticsearch
        pillow = get_group_pillow()
        pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(GROUP_INDEX_INFO.index)

        # verify there
        self._verify_group_in_es(group)
Example 43
def _publish_cases_for_sql(domain, case_records):
    records_with_types = filter(lambda r: r.doc_subtype, case_records)
    records_with_no_types = filter(lambda r: not r.doc_subtype, case_records)
    # if we already had a type just publish as-is
    for record in records_with_types:
        producer.send_change(
            topics.CASE_SQL,
            _change_meta_for_sql_case(domain, record.doc_id,
                                      record.doc_subtype))

    # else lookup the type from the database
    for record_chunk in chunked(records_with_no_types, 10000):
        # databases will contain a mapping of shard database ids to case_ids in that DB
        id_chunk = [r.doc_id for r in record_chunk]
        databases = ShardAccessor.get_docs_by_database(id_chunk)
        for db, doc_ids in databases.items():
            results = CommCareCaseSQL.objects.using(db).filter(
                case_id__in=doc_ids).values_list('case_id', 'type')
            # make sure we found the same number of IDs
            assert len(results) == len(doc_ids)
            for case_id, case_type in results:
                producer.send_change(
                    topics.CASE_SQL,
                    _change_meta_for_sql_case(domain, case_id, case_type))
Example 44
    def _produce_changes(self, count):
        # publish `count` synthetic changes with random document ids
        for i in range(count):
            meta = ChangeMeta(document_id=uuid.uuid4().hex,
                              data_source_type='dummy-type',
                              data_source_name='dummy-name')
            producer.send_change(topics.CASE, meta)
Example 45
def publish_synclog_saved(synclog):
    from corehq.apps.change_feed import topics
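    # local import of topics, presumably to avoid a circular import at
    # module load time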
    producer.send_change(topics.SYNCLOG_SQL, change_meta_from_synclog(synclog))
Example 46
def publish_ledger_v1_saved(stock_state, deleted=False):
    producer.send_change(topics.LEDGER,
                         change_meta_from_ledger_v1(stock_state, deleted))
Example 47
def publish_form_saved(form):
    producer.send_change(topics.FORM_SQL, change_meta_from_sql_form(form))
Example 48
def publish_ledger_v2_saved(ledger_value):
    producer.send_change(
        topics.LEDGER,
        change_meta_from_ledger_v2(ledger_value.ledger_reference,
                                   ledger_value.domain))
Example 49
def publish_case_saved(case):
    producer.send_change(topics.CASE_SQL, change_meta_from_sql_case(case))
Example 51
def publish_ledger_v2_saved(ledger_value):
    producer.send_change(topics.LEDGER, change_meta_from_ledger_v2(ledger_value))
Example 52
def _publish_forms_for_sql(domain, form_records):
    for record in form_records:
        producer.send_change(topics.FORM_SQL,
                             _change_meta_for_sql_form_record(domain, record))
Example 53
class IndicatorPillowTest(TestCase):

    @classmethod
    def setUpClass(cls):
        super(IndicatorPillowTest, cls).setUpClass()
        cls.config = get_sample_data_source()
        cls.config.save()
        cls.adapter = get_indicator_adapter(cls.config)
        cls.adapter.build_table()
        cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
        cls.pillow = get_kafka_ucr_pillow()

    @classmethod
    def tearDownClass(cls):
        cls.config.delete()
        cls.adapter.drop_table()
        super(IndicatorPillowTest, cls).tearDownClass()

    def tearDown(self):
        self.adapter.clear_table()

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, expected_indicators, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.adapter.refresh_table()
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(
                    expected_indicators[k], v,
                    'mismatched property: {} (expected {}, was {})'.format(
                        k, expected_indicators[k], v
                    )
                )

    def test_stale_rebuild(self):
        # rebuild indicators in another test will save this
        later_config = DataSourceConfiguration.get(self.config._id)
        later_config.save()
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.rebuild_table(get_indicator_adapter(self.config))

    @patch('corehq.apps.userreports.specs.datetime')
    def test_change_transport(self, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.process_change(doc_to_change(sample_doc))
        self._check_sample_doc_state(expected_indicators)

    @patch('corehq.apps.userreports.specs.datetime')
    def test_rebuild_indicators(self, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        CommCareCase.get_db().save_doc(sample_doc)
        self.addCleanup(lambda id: CommCareCase.get_db().delete_doc(id), sample_doc['_id'])
        rebuild_indicators(self.config._id)
        self._check_sample_doc_state(expected_indicators)

    def test_bad_integer_datatype(self):
        bad_ints = ['a', '', None]
        for bad_value in bad_ints:
            self.pillow.process_change(doc_to_change({
                '_id': uuid.uuid4().hex,
                'doc_type': 'CommCareCase',
                'domain': 'user-reports',
                'type': 'ticket',
                'priority': bad_value
            }))
        self.adapter.refresh_table()
        # make sure we saved rows to the table for everything
        self.assertEqual(len(bad_ints), self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    def test_basic_doc_processing(self, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.process_change(doc_to_change(sample_doc))
        self._check_sample_doc_state(expected_indicators)

    @patch('corehq.apps.userreports.specs.datetime')
    def test_not_relevant_to_domain(self, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        sample_doc['domain'] = 'not-this-domain'
        self.pillow.process_change(doc_to_change(sample_doc))
        self.adapter.refresh_table()
        self.assertEqual(0, self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    def test_process_doc_from_couch(self, datetime_mock):
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)

        # make sure case is in DB
        case = CommCareCase.wrap(sample_doc)
        with drop_connected_signals(case_post_save):
            case.save()

        # send to kafka
        since = self.pillow.get_change_feed().get_latest_offsets()
        producer.send_change(topics.CASE, doc_to_change(sample_doc).metadata)

        # run pillow and check changes
        self.pillow.process_changes(since=since, forever=False)
        self._check_sample_doc_state(expected_indicators)
        case.delete()
Example 56
def publish_stub_change(topic):
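    # a throwaway ChangeMeta with a random document id and dummy source
    # fields, returned so callers can inspect what was sent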
    meta = ChangeMeta(document_id=uuid.uuid4().hex, data_source_type='dummy-type', data_source_name='dummy-name')
    producer.send_change(topic, meta)
    return meta
Example 57
def publish_ledger_v1_saved(stock_state):
    producer.send_change(topics.LEDGER, change_meta_from_ledger_v1(stock_state))
Example 58
def publish_sms_saved(sms):
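    # unlike the unconditional publishers above, this send is gated by
    # do_publish()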
    if do_publish():
        producer.send_change(topics.SMS, change_meta_from_sms(sms))