def test_xform_pillow_sql(self):
    consumer = get_test_kafka_consumer(topics.FORM_SQL)
    # have to get the seq id before the change is processed
    kafka_seq = consumer.offsets()['fetch'][(topics.FORM_SQL, 0)]

    metadata = TestFormMetadata(domain=self.domain)
    form = get_form_ready_to_save(metadata, is_db_test=True)
    form_processor = FormProcessorInterface(domain=self.domain)
    form_processor.save_processed_models([form])

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(form.form_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    sql_pillow = get_sql_xform_to_elasticsearch_pillow()
    sql_pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(self.pillow.es_index)

    # confirm change made it to elasticsearch
    results = FormES().run()
    self.assertEqual(1, results.total)
    form_doc = results.hits[0]
    self.assertEqual(self.domain, form_doc['domain'])
    self.assertEqual(metadata.xmlns, form_doc['xmlns'])
    self.assertEqual('XFormInstance', form_doc['doc_type'])

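# These tests all funnel through change_meta_from_kafka_message, which
# deserializes the raw Kafka message value into a ChangeMeta. What follows
# is a rough, hypothetical sketch only -- the real ChangeMeta lives in
# corehq's pillowtop code, and the field list below is inferred from the
# assertions in the surrounding tests, not copied from the source:
import json
from dataclasses import dataclass, fields
from typing import Optional


@dataclass
class ChangeMeta:
    document_id: str
    domain: Optional[str] = None
    data_source_type: Optional[str] = None
    data_source_name: Optional[str] = None
    document_type: Optional[str] = None
    document_subtype: Optional[str] = None
    is_deletion: bool = False


def change_meta_from_kafka_message(value):
    # Kafka message values arrive as bytes; the payload is assumed to be
    # JSON. Unknown keys are dropped so the sketch tolerates extra fields.
    data = json.loads(value)
    known = {f.name for f in fields(ChangeMeta)}
    return ChangeMeta(**{k: v for k, v in data.items() if k in known})
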
def test_sql_sms_pillow(self, mock_do_publish):
    mock_do_publish.return_value = True
    consumer = get_test_kafka_consumer(topics.SMS)
    # get the seq id before the change is published
    kafka_seq = get_topic_offset(topics.SMS)

    # create an sms
    sms_and_dict = create_fake_sms(self.domain)
    self.sms = sms_and_dict.sms
    sms_json = self._to_json(sms_and_dict.sms_dict, self.sms)

    # test serialization
    self.assertEqual(self.sms.to_json(), sms_json)

    # publish the change and confirm it gets to kafka
    self.sms.publish_change()
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(self.sms.couch_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    sms_pillow = get_sql_sms_pillow('SqlSMSPillow')
    sms_pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(SMS_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = SMSES().run()
    self.assertEqual(1, results.total)
    sms_doc = results.hits[0]
    self.assertEqual(sms_doc, sms_json)

def test_ledger_pillow(self):
    factory = CaseFactory(domain=self.domain)
    case = factory.create_case()

    consumer = get_test_kafka_consumer(topics.LEDGER)
    # have to get the seq id before the change is processed
    kafka_seq = get_topic_offset(topics.LEDGER)

    from corehq.apps.commtrack.tests.util import get_single_balance_block
    from corehq.apps.hqcase.utils import submit_case_blocks
    submit_case_blocks(
        [get_single_balance_block(case.case_id, self.product_id, 100)],
        self.domain
    )

    ref = UniqueLedgerReference(case.case_id, 'stock', self.product_id)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    if should_use_sql_backend(self.domain):
        self.assertEqual(ref.as_id(), change_meta.document_id)
    else:
        from corehq.apps.commtrack.models import StockState
        state = StockState.objects.all()
        self.assertEqual(1, len(state))
        self.assertEqual(state[0].pk, change_meta.document_id)
    # self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    self.pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(LEDGER_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    self._assert_ledger_in_es(ref)

def test_pillow(self):
    from corehq.apps.change_feed.topics import get_topic_offset
    from corehq.pillows.synclog import get_user_sync_history_pillow
    consumer = get_test_kafka_consumer(topics.SYNCLOG_SQL)
    # get the seq id before the change is published
    kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

    # make sure user has empty reporting-metadata before a sync
    self.assertEqual(self.ccuser.reporting_metadata.last_syncs, [])

    # do a sync
    synclog = SyncLog(domain=self.domain.name, user_id=self.ccuser._id,
                      date=datetime.datetime(2015, 7, 1, 0, 0))
    synclog.save()

    # make sure kafka change updates the user with latest sync info
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    synclog = self._get_latest_synclog()
    self.assertEqual(change_meta.document_id, synclog._id)
    self.assertEqual(change_meta.domain, self.domain.name)

    # make sure processor updates the user correctly
    pillow = get_user_sync_history_pillow()
    pillow.process_changes(since=kafka_seq, forever=False)
    ccuser = CommCareUser.get(self.ccuser._id)
    self.assertEqual(len(ccuser.reporting_metadata.last_syncs), 1)
    self.assertEqual(ccuser.reporting_metadata.last_syncs[0].sync_date, synclog.date)
    self.assertEqual(ccuser.reporting_metadata.last_sync_for_user.sync_date, synclog.date)

def test_app_pillow_kafka(self):
    consumer = get_test_kafka_consumer(topics.APP)
    # have to get the seq id before the change is processed
    kafka_seq = get_topic_offset(topics.APP)
    couch_seq = get_current_seq(Application.get_db())

    app_name = 'app-{}'.format(uuid.uuid4().hex)
    app = self._create_app(app_name)

    app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
    app_db_pillow.process_changes(couch_seq, forever=False)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(app._id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    app_pillow = get_app_to_elasticsearch_pillow()
    app_pillow.process_changes(since=kafka_seq, forever=False)
    self.es.indices.refresh(APP_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = AppES().run()
    self.assertEqual(1, results.total)
    app_doc = results.hits[0]
    self.assertEqual(self.domain, app_doc['domain'])
    self.assertEqual(app['_id'], app_doc['_id'])
    self.assertEqual(app_name, app_doc['name'])

def test_form_is_published(self):
    kafka_consumer = get_test_kafka_consumer(topics.FORM_SQL)
    form = create_and_save_a_form(self.domain)

    message = next(kafka_consumer)
    change_meta = change_meta_from_kafka_message(message.value)

    self.assertEqual(form.form_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

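# For readers unfamiliar with the get_test_kafka_consumer helper: it is
# presumably a thin wrapper over kafka-python's KafkaConsumer, configured
# like the explicit construction in test_process_change below. A plausible
# sketch only -- the real helper lives in corehq's change_feed test
# utilities, and the exact config is an assumption:
from django.conf import settings
from kafka import KafkaConsumer


def get_test_kafka_consumer(*topics):
    # A short consumer_timeout_ms makes iteration raise StopIteration
    # (ConsumerTimeout in older kafka-python) instead of blocking forever
    # when no message arrives, which is what the tests rely on.
    return KafkaConsumer(
        *topics,
        group_id='test-consumer',
        bootstrap_servers=[settings.KAFKA_URL],
        consumer_timeout_ms=100,
    )
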
def test_process_change(self):
    consumer = KafkaConsumer(
        topics.CASE,
        group_id='test-consumer',
        bootstrap_servers=[settings.KAFKA_URL],
        consumer_timeout_ms=100,
    )
    pillow = ChangeFeedPillow(self._fake_couch, kafka=get_kafka_client(), checkpoint=None)
    document = {
        'doc_type': 'CommCareCase',
        'type': 'mother',
        'domain': 'kafka-test-domain',
    }
    pillow.process_change(Change(id='test-id', sequence_id='3', document=document))

    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(COUCH, change_meta.data_source_type)
    self.assertEqual(self._fake_couch.dbname, change_meta.data_source_name)
    self.assertEqual('test-id', change_meta.document_id)
    self.assertEqual(document['doc_type'], change_meta.document_type)
    self.assertEqual(document['type'], change_meta.document_subtype)
    self.assertEqual(document['domain'], change_meta.domain)
    self.assertEqual(False, change_meta.is_deletion)

    with self.assertRaises(ConsumerTimeout):
        next(consumer)

def test_sql_sms_pillow(self, mock_do_publish):
    mock_do_publish.return_value = True
    consumer = get_test_kafka_consumer(topics.SMS)
    # get the seq id before the change is published
    kafka_seq = get_topic_offset(topics.SMS)

    # create an sms
    self._create_sms()
    sms_json = self._to_json(self.sms_dict, self.sms)

    # test serialization
    self.assertEqual(self.sms.to_json(), sms_json)

    # publish the change and confirm it gets to kafka
    self.sms.publish_change()
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(self.sms.couch_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    sms_pillow = get_sql_sms_pillow('SqlSMSPillow')
    sms_pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(SMS_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = SMSES().run()
    self.assertEqual(1, results.total)
    sms_doc = results.hits[0]
    self.assertEqual(sms_doc, sms_json)

def test_case_pillow_sql(self):
    consumer = get_test_kafka_consumer(topics.CASE_SQL)
    # have to get the seq id before the change is processed
    # (legacy kafka-python offset lookup; newer tests use get_topic_offset)
    kafka_seq = consumer.offsets()['fetch'][(topics.CASE_SQL, 0)]

    # make a case
    case_id = uuid.uuid4().hex
    case_name = 'case-name-{}'.format(uuid.uuid4().hex)
    case = self._make_a_case(case_id, case_name)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(case.case_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    sql_pillow = get_sql_case_to_elasticsearch_pillow()
    sql_pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(self.pillow.es_index)

    # confirm change made it to elasticsearch
    results = CaseES().run()
    self.assertEqual(1, results.total)
    case_doc = results.hits[0]
    self.assertEqual(self.domain, case_doc['domain'])
    self.assertEqual(case_id, case_doc['_id'])
    self.assertEqual(case_name, case_doc['name'])

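# Two offset-capture styles appear in these tests: the legacy
# consumer.offsets()['fetch'][(topic, 0)] lookup and the get_topic_offset
# helper. Assuming a single-partition topic, the helper is presumably
# equivalent to this sketch against the modern kafka-python API (the real
# helper lives in corehq.apps.change_feed.topics):
from django.conf import settings
from kafka import KafkaConsumer, TopicPartition


def get_topic_offset(topic):
    consumer = KafkaConsumer(bootstrap_servers=[settings.KAFKA_URL])
    partition = TopicPartition(topic, 0)
    # end_offsets returns the offset of the next message to be written,
    # i.e. the sequence id from which processing should resume.
    return consumer.end_offsets([partition])[partition]
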
def test_publish_timestamp(self):
    document = {
        'doc_type': 'CommCareCase',
        'type': 'mother',
        'domain': None,
    }
    self.pillow.process_change(Change(id='test-id', sequence_id='3', document=document))
    message = next(self.consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertLessEqual(change_meta.publish_timestamp, datetime.utcnow())

def test_no_domain(self):
    document = {
        'doc_type': 'CommCareCase',
        'type': 'mother',
        'domain': None,
    }
    self.pillow.process_change(Change(id='test-id', sequence_id='3', document=document))
    message = next(self.consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(document['domain'], change_meta.domain)

def test_ledger_pillow_sql(self):
    factory = CaseFactory(domain=self.domain)
    case = factory.create_case()

    consumer = get_test_kafka_consumer(topics.LEDGER)
    # have to get the seq id before the change is processed
    kafka_seq = consumer.offsets()['fetch'][(topics.LEDGER, 0)]

    from corehq.apps.commtrack.tests import get_single_balance_block
    from corehq.apps.hqcase.utils import submit_case_blocks
    submit_case_blocks(
        [get_single_balance_block(case.case_id, self.product_id, 100)],
        self.domain
    )

    ref = UniqueLedgerReference(case.case_id, 'stock', self.product_id)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    if should_use_sql_backend(self.domain):
        self.assertEqual(ref.as_id(), change_meta.document_id)
    else:
        from corehq.apps.commtrack.models import StockState
        state = StockState.objects.all()
        self.assertEqual(1, len(state))
        self.assertEqual(state[0].pk, change_meta.document_id)
    # self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    self.pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(LEDGER_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = self.elasticsearch.search(
        LEDGER_INDEX_INFO.index, LEDGER_INDEX_INFO.type,
        body={"query": {"bool": {"must": [{"match_all": {}}]}}}
    )
    self.assertEqual(1, results['hits']['total'])
    ledger_doc = results['hits']['hits'][0]['_source']
    self.assertEqual(self.domain, ledger_doc['domain'])
    self.assertEqual(ref.case_id, ledger_doc['case_id'])
    self.assertEqual(ref.section_id, ledger_doc['section_id'])
    self.assertEqual(ref.entry_id, ledger_doc['entry_id'])

def test_ledger_pillow_sql(self):
    factory = CaseFactory(domain=self.domain)
    case = factory.create_case()

    consumer = get_test_kafka_consumer(topics.LEDGER)
    # have to get the seq id before the change is processed
    kafka_seq = get_topic_offset(topics.LEDGER)

    from corehq.apps.commtrack.tests.util import get_single_balance_block
    from corehq.apps.hqcase.utils import submit_case_blocks
    submit_case_blocks(
        [get_single_balance_block(case.case_id, self.product_id, 100)],
        self.domain
    )

    ref = UniqueLedgerReference(case.case_id, 'stock', self.product_id)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    if should_use_sql_backend(self.domain):
        self.assertEqual(ref.as_id(), change_meta.document_id)
    else:
        from corehq.apps.commtrack.models import StockState
        state = StockState.objects.all()
        self.assertEqual(1, len(state))
        self.assertEqual(state[0].pk, change_meta.document_id)
    # self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    self.pillow.process_changes(since=kafka_seq, forever=False)
    self.elasticsearch.indices.refresh(LEDGER_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = self.elasticsearch.search(
        LEDGER_INDEX_INFO.index, LEDGER_INDEX_INFO.type,
        body={"query": {"bool": {"must": [{"match_all": {}}]}}}
    )
    self.assertEqual(1, results['hits']['total'])
    ledger_doc = results['hits']['hits'][0]['_source']
    self.assertEqual(self.domain, ledger_doc['domain'])
    self.assertEqual(ref.case_id, ledger_doc['case_id'])
    self.assertEqual(ref.section_id, ledger_doc['section_id'])
    self.assertEqual(ref.entry_id, ledger_doc['entry_id'])

def test_duplicate_case_published(self):
    case_id = uuid.uuid4().hex
    form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
    submit_form_locally(form_xml, domain=self.domain)
    self.assertEqual(1, len(CaseAccessors(self.domain).get_case_ids_in_domain()))

    case_consumer = get_test_kafka_consumer(topics.CASE_SQL)
    dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
    self.assertTrue(dupe_form.is_duplicate)

    # check the case was republished
    case_meta = change_meta_from_kafka_message(next(case_consumer).value)
    self.assertEqual(case_id, case_meta.document_id)
    self.assertEqual(self.domain, case_meta.domain)

def test_duplicate_form_published(self):
    form_id = uuid.uuid4().hex
    form_xml = get_simple_form_xml(form_id)
    orig_form = submit_form_locally(form_xml, domain=self.domain)[1]
    self.assertEqual(form_id, orig_form.form_id)
    self.assertEqual(1, len(self.form_accessors.get_all_form_ids_in_domain()))

    form_consumer = get_test_kafka_consumer(topics.FORM_SQL)

    # post an exact duplicate
    dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
    self.assertTrue(dupe_form.is_duplicate)
    self.assertNotEqual(form_id, dupe_form.form_id)
    self.assertEqual(form_id, dupe_form.orig_id)

    # make sure changes made it to kafka
    # first the dupe
    dupe_form_meta = change_meta_from_kafka_message(next(form_consumer).value)
    self.assertEqual(dupe_form.form_id, dupe_form_meta.document_id)

    # then the original form
    orig_form_meta = change_meta_from_kafka_message(next(form_consumer).value)
    self.assertEqual(orig_form.form_id, orig_form_meta.document_id)
    self.assertEqual(self.domain, orig_form_meta.domain)

def test_duplicate_ledger_published(self):
    # setup products and case
    product_a = make_product(self.domain, 'A Product', 'prodcode_a')
    product_b = make_product(self.domain, 'B Product', 'prodcode_b')
    case_id = uuid.uuid4().hex
    form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
    submit_form_locally(form_xml, domain=self.domain)

    # submit ledger data
    balances = (
        (product_a._id, 100),
        (product_b._id, 50),
    )
    ledger_blocks = [
        get_single_balance_block(case_id, prod_id, balance)
        for prod_id, balance in balances
    ]
    form = submit_case_blocks(ledger_blocks, self.domain)

    # submit duplicate
    ledger_consumer = get_test_kafka_consumer(topics.LEDGER)
    dupe_form = submit_form_locally(form.get_xml(), domain=self.domain)[1]
    self.assertTrue(dupe_form.is_duplicate)

    # confirm republished
    ledger_meta_a = change_meta_from_kafka_message(next(ledger_consumer).value)
    ledger_meta_b = change_meta_from_kafka_message(next(ledger_consumer).value)

    def format_id(product_id):
        return '{}/{}/{}'.format(case_id, 'stock', product_id)

    expected_ids = {format_id(product_a._id), format_id(product_b._id)}
    for meta in [ledger_meta_a, ledger_meta_b]:
        self.assertIn(meta.document_id, expected_ids)
        expected_ids.remove(meta.document_id)
        self.assertEqual(self.domain, meta.domain)

    # cleanup
    product_a.delete()
    product_b.delete()

def test_form_pillow_non_existent_build_id(self):
    consumer = get_test_kafka_consumer(topics.FORM, topics.FORM_SQL)
    kafka_seq = self._get_kafka_seq()

    form = self._make_form(build_id='not-here')

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(form.form_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    self.assertFalse(self.app.has_submissions)
    self.pillow.process_changes(since=kafka_seq, forever=False)
    self.assertFalse(Application.get(self.app._id).has_submissions)

def test_form_pillow_mismatch_domains(self):
    consumer = get_test_kafka_consumer(topics.FORM, topics.FORM_SQL)
    kafka_seq = self._get_kafka_seq()

    self.app.domain = 'not-this-domain'
    self.app.save()
    form = self._make_form()

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(form.form_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    self.assertFalse(self.app.has_submissions)
    self.pillow.process_changes(since=kafka_seq, forever=False)
    self.assertFalse(Application.get(self.app._id).has_submissions)

def test(self):
    document = {
        'doc_type': 'CommCareCase',
        'type': 'mother',
        'domain': 'kafka-test-domain',
    }
    change = Change(id='test-id', sequence_id='3', document=document)
    populate_change_metadata(change, SOURCE_COUCH, self._fake_couch.dbname)

    with patch('pillow_retry.api.get_pillow_by_name', return_value=self.pillow):
        # first change creates error
        message = 'test retry 1'
        self.pillow.process_change = MagicMock(side_effect=TestException(message))
        self.pillow.process_with_error_handling(
            change, PillowRuntimeContext(changes_seen=0))
        errors = self._check_errors(1, message)

        # second attempt updates error
        process_pillow_retry(errors[0])
        errors = self._check_errors(2)

        # third attempt successful
        self.pillow.process_change = self.original_process_change
        process_pillow_retry(errors[0])
        errors = list(PillowError.objects.filter(pillow=self.pillow.pillow_id).all())
        self.assertEqual(0, len(errors))

        message = next(self.consumer)
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(SOURCE_COUCH, change_meta.data_source_type)
        self.assertEqual(self._fake_couch.dbname, change_meta.data_source_name)
        self.assertEqual('test-id', change_meta.document_id)
        self.assertEqual(document['doc_type'], change_meta.document_type)
        self.assertEqual(document['type'], change_meta.document_subtype)
        self.assertEqual(document['domain'], change_meta.domain)
        self.assertEqual(False, change_meta.is_deletion)

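# The retry flow above relies on process_with_error_handling swallowing the
# exception and persisting it for later replay. A hypothetical paraphrase of
# that contract (the real logic lives in pillowtop / pillow_retry, and
# save_pillow_error below stands in for the real persistence call):
def process_with_error_handling(pillow, change, context):
    try:
        pillow.process_change(change)
    except Exception as exc:
        # Failures are recorded as PillowError rows instead of being
        # raised, so process_pillow_retry() can replay them later.
        save_pillow_error(pillow, change, exc)  # hypothetical helper
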
def test_hard_delete_app(self):
    consumer = get_test_kafka_consumer(topics.APP)
    # have to get the seq id before the change is processed
    kafka_seq = get_topic_offset(topics.APP)
    couch_seq = get_current_seq(Application.get_db())

    app = self._create_app('test_hard_deleted_app', cleanup=False)
    app_db_pillow = get_application_db_kafka_pillow('test_app_db_pillow')
    app_db_pillow.process_changes(couch_seq, forever=False)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(app._id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # send to elasticsearch
    app_pillow = get_app_to_elasticsearch_pillow()
    app_pillow.process_changes(since=kafka_seq, forever=False)
    self.es.indices.refresh(APP_INDEX_INFO.index)

    # confirm change made it to elasticsearch
    results = AppES().run()
    self.assertEqual(1, results.total)

    couch_seq = get_current_seq(Application.get_db())
    kafka_seq = get_topic_offset(topics.APP)
    app.delete()
    app_db_pillow.process_changes(couch_seq, forever=False)

    # confirm change made it to kafka. Would raise StopIteration otherwise
    next(consumer)

    # send to elasticsearch
    app_pillow = get_app_to_elasticsearch_pillow()
    app_pillow.process_changes(since=kafka_seq, forever=False)
    self.es.indices.refresh(APP_INDEX_INFO.index)

    # confirm deletion made it to elasticsearch
    results = AppES().run()
    self.assertEqual(0, results.total)

def test_sql_case_search_pillow(self):
    consumer = get_test_kafka_consumer(topics.CASE_SQL)
    # have to get the seq id before the change is processed
    kafka_seq = self._get_kafka_seq()

    case = self._make_case(case_properties={'something': 'something_else'})

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(case.case_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # enable case search for domain
    with patch('corehq.pillows.case_search.domain_needs_search_index',
               new=MagicMock(return_value=True)) as fake_case_search_enabled_for_domain:
        # send to elasticsearch
        self.pillow.process_changes(since=kafka_seq, forever=False)
        fake_case_search_enabled_for_domain.assert_called_with(self.domain)

    self._assert_case_in_es(self.domain, case)

def test_process_change(self):
    document = {
        'doc_type': 'CommCareCase',
        'type': 'mother',
        'domain': 'kafka-test-domain',
    }
    self.pillow.process_change(Change(id='test-id', sequence_id='3', document=document))
    message = next(self.consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(SOURCE_COUCH, change_meta.data_source_type)
    self.assertEqual(self._fake_couch.dbname, change_meta.data_source_name)
    self.assertEqual('test-id', change_meta.document_id)
    self.assertEqual(document['doc_type'], change_meta.document_type)
    self.assertEqual(document['type'], change_meta.document_subtype)
    self.assertEqual(document['domain'], change_meta.domain)
    self.assertEqual(False, change_meta.is_deletion)

    with self.assertRaises(StopIteration):
        next(self.consumer)

def test_case_search_pillow(self):
    consumer = get_test_kafka_consumer(topics.CASE)
    kafka_seq = self._get_kafka_seq()

    case = self._make_case(case_properties={'foo': 'bar'})
    producer.send_change(topics.CASE, doc_to_change(case.to_json()).metadata)

    # confirm change made it to kafka
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(case.case_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # enable case search for domain
    with patch('corehq.pillows.case_search.domain_needs_search_index',
               new=MagicMock(return_value=True)) as fake_case_search_enabled_for_domain:
        # send to elasticsearch
        self.pillow.process_changes(since=kafka_seq, forever=False)
        fake_case_search_enabled_for_domain.assert_called_with(self.domain)

    self._assert_case_in_es(self.domain, case)

def test_process_change(self):
    document = {
        'doc_type': 'CommCareCase',
        'type': 'mother',
        'domain': 'kafka-test-domain',
    }
    self.pillow.process_change(Change(id='test-id', sequence_id='3', document=document))
    message = next(self.consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    self.assertEqual(COUCH, change_meta.data_source_type)
    self.assertEqual(self._fake_couch.dbname, change_meta.data_source_name)
    self.assertEqual('test-id', change_meta.document_id)
    self.assertEqual(document['doc_type'], change_meta.document_type)
    self.assertEqual(document['type'], change_meta.document_subtype)
    self.assertEqual(document['domain'], change_meta.domain)
    self.assertEqual(False, change_meta.is_deletion)

    with self.assertRaises(ConsumerTimeout):
        next(self.consumer)

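# The consume-and-assert steps repeated across these tests could be factored
# into a small helper. A hypothetical sketch, not part of the codebase:
def assert_change_published(test_case, consumer, expected_doc_id, expected_domain):
    # Pull the next message off the topic and verify its ChangeMeta points
    # at the expected document in the expected domain.
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    test_case.assertEqual(expected_doc_id, change_meta.document_id)
    test_case.assertEqual(expected_domain, change_meta.domain)
    return change_meta
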
def test_pillow_form_processed(self):
    from corehq.apps.change_feed.topics import get_topic_offset
    from corehq.pillows.synclog import get_user_sync_history_pillow
    self.assertEqual(UserReportingMetadataStaging.objects.count(), 0)
    UserReportingMetadataStaging.add_submission(
        self.domain.name, self.ccuser._id, '123', None, None, {}, datetime.datetime.utcnow()
    )
    self.assertEqual(UserReportingMetadataStaging.objects.count(), 1)

    consumer = get_test_kafka_consumer(topics.SYNCLOG_SQL)
    # get the seq id before the change is published
    kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

    # make sure user has empty reporting-metadata before a sync
    ccuser = CommCareUser.get(self.ccuser._id)
    self.assertEqual(ccuser.reporting_metadata.last_syncs, [])

    # do a sync
    synclog = SimplifiedSyncLog(domain=self.domain.name, user_id=self.ccuser._id,
                                date=datetime.datetime(2015, 7, 1, 0, 0), app_id='123')
    synclog.save()

    # make sure kafka change updates the user with latest sync info
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    synclog = self._get_latest_synclog()
    self.assertEqual(change_meta.document_id, synclog._id)
    self.assertEqual(change_meta.domain, self.domain.name)
    self.assertEqual(UserReportingMetadataStaging.objects.count(), 1)

    # make sure processor updates the user correctly
    pillow = get_user_sync_history_pillow()
    pillow.process_changes(since=kafka_seq, forever=False)
    process_reporting_metadata_staging()
    ccuser = CommCareUser.get(self.ccuser._id)
    self.assertEqual(len(ccuser.reporting_metadata.last_syncs), 1)
    self.assertEqual(ccuser.reporting_metadata.last_syncs[0].sync_date, synclog.date)
    self.assertEqual(ccuser.reporting_metadata.last_sync_for_user.sync_date, synclog.date)

def test_case_is_published(self):
    kafka_consumer = get_test_kafka_consumer(topics.CASE_SQL)
    case = create_and_save_a_case(self.domain, case_id=uuid.uuid4().hex, case_name='test case')

    change_meta = change_meta_from_kafka_message(next(kafka_consumer).value)
    self.assertEqual(case.case_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

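# Several documents in these tests publish themselves on save (e.g.
# create_and_save_a_case, sms.publish_change()). Judging from the explicit
# producer call in test_case_search_pillow, the publish step presumably
# boils down to something like this sketch. The import paths and the
# doc_to_change helper (the same one used in test_case_search_pillow) are
# assumptions, not confirmed source:
from corehq.apps.change_feed import topics
from corehq.apps.change_feed.producer import producer


def publish_case_saved(case):
    # doc_to_change wraps the raw document in a Change whose .metadata is
    # the ChangeMeta these tests read back off the topic.
    producer.send_change(topics.CASE_SQL, doc_to_change(case.to_json()).metadata)
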