def test_pillow_save_to_one_database_at_a_time(self):
    """A bootstrapped UCR pillow writes only to its currently configured datasource."""
    pillow = get_kafka_ucr_pillow()
    pillow.bootstrap(configs=[self.ds_1])
    doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(doc))

    # only the first datasource should have received the row
    self.assertEqual(1, self.ds1_adapter.get_query_object().count())
    self.assertEqual(0, self.ds2_adapter.get_query_object().count())

    # save to the other: re-bootstrap against ds_2 and process a fresh doc id
    pillow.bootstrap(configs=[self.ds_2])
    first_doc_id = doc['_id']
    doc['_id'] = uuid.uuid4().hex
    pillow.process_change(doc_to_change(doc))

    # each table now holds exactly the doc processed while it was active
    self.assertEqual(1, self.ds1_adapter.get_query_object().count())
    self.assertEqual(1, self.ds2_adapter.get_query_object().count())
    ds1_rows = self.ds1_adapter.get_query_object().filter_by(doc_id=first_doc_id)
    ds2_rows = self.ds2_adapter.get_query_object().filter_by(doc_id=doc['_id'])
    self.assertEqual(1, ds1_rows.count())
    self.assertEqual(1, ds2_rows.count())
def test_process_filter_no_longer_pass(self, datetime_mock):
    """A doc that once matched the filter is removed once it stops matching."""
    datetime_mock.utcnow.return_value = self.fake_time_now
    case_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)

    # first pass: the doc matches the filter and lands in the table
    self.pillow.process_change(doc_to_change(case_doc))
    self._check_sample_doc_state(expected_indicators)

    # second pass: mutate the type so the filter rejects it; the row goes away
    case_doc['type'] = 'wrong_type'
    self.pillow.process_change(doc_to_change(case_doc))
    self.assertEqual(0, self.adapter.get_query_object().count())
def test_not_relevant_to_domain(self, datetime_mock):
    """Docs from a foreign domain are ignored by the pillow."""
    datetime_mock.utcnow.return_value = self.fake_time_now
    foreign_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
    foreign_doc['domain'] = 'not-this-domain'

    self.pillow.process_change(doc_to_change(foreign_doc))

    # refresh before counting so the read reflects the (non-)write
    self.adapter.refresh_table()
    self.assertEqual(0, self.adapter.get_query_object().count())
def test_xform_pillow_couch(self):
    """Processing a submitted form flips the couch app's has_submissions flag."""
    submitted_form = self._make_form()
    offsets = self._get_kafka_seq()

    change_meta = doc_to_change(submitted_form.to_json()).metadata
    producer.send_change(topics.FORM, change_meta)

    # the flag is still unset until the pillow has run
    self.assertFalse(self.app.has_submissions)
    self.pillow.process_changes(since=offsets, forever=False)
    self.assertTrue(Application.get(self.app._id).has_submissions)
def test_pillow_save_to_one_database_at_a_time(self):
    """A case pillow configured with a single datasource writes only to that table."""
    pillow = get_case_pillow(ucr_configs=[self.ds_1])
    doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(doc))

    # only the first datasource should have received the row
    self.assertEqual(1, self.ds1_adapter.get_query_object().count())
    self.assertEqual(0, self.ds2_adapter.get_query_object().count())

    # save to the other: rebuild the pillow against ds_2 and process a new doc id
    pillow = get_case_pillow(ucr_configs=[self.ds_2])
    first_doc_id = doc['_id']
    doc['_id'] = uuid.uuid4().hex
    pillow.process_change(doc_to_change(doc))

    # each table holds exactly the doc processed while it was the active config
    self.assertEqual(1, self.ds1_adapter.get_query_object().count())
    self.assertEqual(1, self.ds2_adapter.get_query_object().count())
    ds1_rows = self.ds1_adapter.get_query_object().filter_by(doc_id=first_doc_id)
    ds2_rows = self.ds2_adapter.get_query_object().filter_by(doc_id=doc['_id'])
    self.assertEqual(1, ds1_rows.count())
    self.assertEqual(1, ds2_rows.count())
def test_pillow_save_to_multiple_databases(self):
    """One pillow configured with two datasources writes a row to each database."""
    # the adapters must point at distinct engines before, during, and after
    self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
    pillow = get_case_pillow(ucr_configs=[self.ds_1, self.ds_2])
    self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)

    doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(doc))
    self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)

    # the single change should land in both tables
    for adapter in (self.ds1_adapter, self.ds2_adapter):
        self.assertEqual(1, adapter.get_query_object().count())
def test_check_if_doc_exist(self, datetime_mock):
    """doc_exists() must flip from False to True after the doc is processed.

    Fix: the pre-state previously used assertFalse, which accepts any falsy
    value, while the post-state used assertIs(..., True). Both sides now use
    identity checks so doc_exists() is pinned to returning real booleans.
    """
    datetime_mock.utcnow.return_value = self.fake_time_now
    sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
    self.assertIs(self.adapter.doc_exists(sample_doc), False)
    self.pillow.process_change(doc_to_change(sample_doc))
    self.assertIs(self.adapter.doc_exists(sample_doc), True)
def test_bad_integer_datatype(self):
    """Docs whose integer column holds an unparseable value are still persisted."""
    bad_priorities = ['a', '', None]
    for value in bad_priorities:
        case = {
            '_id': uuid.uuid4().hex,
            'doc_type': 'CommCareCase',
            'domain': 'user-reports',
            'type': 'ticket',
            'priority': value,
        }
        self.pillow.process_change(doc_to_change(case))
    # make sure we saved rows to the table for everything
    self.assertEqual(len(bad_priorities), self.adapter.get_query_object().count())
def _add_rows(self, rows):
    """Feed one synthetic 'city' case per row through a bootstrapped UCR pillow."""
    pillow = get_kafka_ucr_pillow()
    pillow.bootstrap(configs=[self.data_source])

    def _as_case_doc(row):
        # fixed fields first, then the per-row values
        case = {
            '_id': uuid.uuid4().hex,
            'domain': self.domain,
            'doc_type': 'CommCareCase',
            'type': 'city',
        }
        case['name'] = row.name
        case['number'] = row.number
        return case

    for row in rows:
        pillow.process_change(doc_to_change(_as_case_doc(row)))
def _add_rows(self, rows):
    """Process each row as a synthetic 'city' case through a case pillow."""
    pillow = get_case_pillow(ucr_configs=[self.data_source])

    def _as_case_doc(row):
        return dict(
            _id=uuid.uuid4().hex,
            domain=self.domain,
            doc_type='CommCareCase',
            type='city',
            name=row.name,
            number=row.number,
            just_for_sorting=row.sort_key,
        )

    for row in rows:
        pillow.process_change(doc_to_change(_as_case_doc(row)))
def _add_rows(self, rows):
    """Push one generated 'city' case per row through a kafka UCR pillow."""
    pillow = get_kafka_ucr_pillow()
    pillow.bootstrap(configs=[self.data_source])

    # fields shared by every generated case
    base = {"domain": self.domain, "doc_type": "CommCareCase", "type": "city"}
    for row in rows:
        doc = dict(base, _id=uuid.uuid4().hex, name=row.name, number=row.number)
        pillow.process_change(doc_to_change(doc))
def _test_process_doc_from_couch(self, datetime_mock, pillow):
    """Shared helper: save a case to couch, publish its change to kafka, run
    *pillow* over the new offsets, and verify the resulting indicator row.

    Fix: offsets were previously read from ``self.pillow`` even though the
    pillow under test is passed in as ``pillow``; they are now read from the
    same pillow that processes the changes so the two cannot diverge.
    """
    datetime_mock.utcnow.return_value = self.fake_time_now
    sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)

    # make sure case is in DB (suppressing the post-save signal)
    case = CommCareCase.wrap(sample_doc)
    with drop_connected_signals(case_post_save):
        case.save()

    # remember offsets from before the publish, then send to kafka
    since = pillow.get_change_feed().get_latest_offsets()
    producer.send_change(topics.CASE, doc_to_change(sample_doc).metadata)

    # run pillow and check changes
    pillow.process_changes(since=since, forever=False)
    self._check_sample_doc_state(expected_indicators)
    case.delete()
def test_mirroring(self):
    """A datasource with mirrored_engine_ids writes the same row to every engine."""
    ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
    ds3.engine_id = DEFAULT_ENGINE_ID
    ds3.mirrored_engine_ids = ['engine-2']

    adapter = get_indicator_adapter(ds3)
    # a mirrored config must yield a multi-DB adapter wrapping both engines
    self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
    self.assertEqual(len(adapter.all_adapters), 2)

    def _row_count(db_adapter):
        with db_adapter.session_context() as session:
            return session.query(db_adapter.get_table()).count()

    for db_adapter in adapter.all_adapters:
        self.assertEqual(0, _row_count(db_adapter))

    with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
        pillow = get_case_pillow(ucr_configs=[ds3])
    doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(doc))

    for db_adapter in adapter.all_adapters:
        self.assertEqual(1, _row_count(db_adapter))
def test_mirroring(self):
    """A mirrored datasource config fans each write out to all of its engines."""
    ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
    # NOTE(review): the sibling test uses the DEFAULT_ENGINE_ID constant here;
    # kept as the literal to avoid assuming the constant is imported in this file
    ds3.engine_id = "default"
    ds3.mirrored_engine_ids = ['engine-2']

    adapter = get_indicator_adapter(ds3)
    # a mirrored config must yield a multi-DB adapter wrapping both engines
    self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
    self.assertEqual(len(adapter.all_adapters), 2)

    def _row_count(db_adapter):
        with db_adapter.session_context() as session:
            return session.query(db_adapter.get_table()).count()

    for db_adapter in adapter.all_adapters:
        self.assertEqual(0, _row_count(db_adapter))

    with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
        pillow = get_case_pillow(ucr_configs=[ds3])
    doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(doc))

    for db_adapter in adapter.all_adapters:
        self.assertEqual(1, _row_count(db_adapter))
def test_case_search_pillow(self):
    """A case change flows through kafka into the case-search ES index."""
    consumer = get_test_kafka_consumer(topics.CASE)
    offsets = self._get_kafka_seq()

    case = self._make_case(case_properties={'foo': 'bar'})
    producer.send_change(topics.CASE, doc_to_change(case.to_json()).metadata)

    # confirm change made it to kafka
    change_meta = change_meta_from_kafka_message(next(consumer).value)
    self.assertEqual(case.case_id, change_meta.document_id)
    self.assertEqual(self.domain, change_meta.domain)

    # enable case search for domain, then let the pillow index the change
    search_enabled_patch = patch(
        'corehq.pillows.case_search.domain_needs_search_index',
        new=MagicMock(return_value=True))
    with search_enabled_patch as fake_case_search_enabled_for_domain:
        # send to elasticsearch
        self.pillow.process_changes(since=offsets, forever=False)
        fake_case_search_enabled_for_domain.assert_called_with(self.domain)
        self._assert_case_in_es(self.domain, case)
class IndicatorPillowTest(TestCase):
    """End-to-end tests for the kafka UCR pillow against a sample datasource.

    setUpClass builds the indicator table once for the sample data source;
    each test processes docs through the pillow and asserts on the table
    contents via the shared indicator adapter.
    """

    @classmethod
    def setUpClass(cls):
        # Build the sample datasource, its table, and a shared pillow once
        # for the whole class; individual tests only clear rows.
        super(IndicatorPillowTest, cls).setUpClass()
        cls.config = get_sample_data_source()
        cls.config.save()
        cls.adapter = get_indicator_adapter(cls.config)
        cls.adapter.build_table()
        # fixed "now" so time-derived indicator values are deterministic
        cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
        cls.pillow = get_kafka_ucr_pillow()

    @classmethod
    def tearDownClass(cls):
        cls.config.delete()
        cls.adapter.drop_table()
        super(IndicatorPillowTest, cls).tearDownClass()

    def tearDown(self):
        # wipe rows between tests; the table itself persists for the class
        self.adapter.clear_table()

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, expected_indicators, datetime_mock):
        """Assert the table holds exactly one row matching *expected_indicators*.

        Decimal columns are compared with assertAlmostEqual; everything else
        must match exactly (with a descriptive message on mismatch).
        """
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.adapter.refresh_table()
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(
                    expected_indicators[k], v,
                    'mismatched property: {} (expected {}, was {})'.format(
                        k, expected_indicators[k], v
                    )
                )

    def test_stale_rebuild(self):
        # rebuild indicators in another test will save this
        later_config = DataSourceConfiguration.get(self.config._id)
        later_config.save()
        # the saved copy has a newer _rev, so our in-memory config is stale
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.rebuild_table(get_indicator_adapter(self.config))

    @patch('corehq.apps.userreports.specs.datetime')
    def test_change_transport(self, datetime_mock):
        """A processed change produces the expected indicator row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.process_change(doc_to_change(sample_doc))
        self._check_sample_doc_state(expected_indicators)

    @patch('corehq.apps.userreports.specs.datetime')
    def test_rebuild_indicators(self, datetime_mock):
        """rebuild_indicators() repopulates the table from docs already in couch."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        CommCareCase.get_db().save_doc(sample_doc)
        self.addCleanup(lambda id: CommCareCase.get_db().delete_doc(id), sample_doc['_id'])
        rebuild_indicators(self.config._id)
        self._check_sample_doc_state(expected_indicators)

    def test_bad_integer_datatype(self):
        """Docs with unparseable integer values still get a row each."""
        bad_ints = ['a', '', None]
        for bad_value in bad_ints:
            self.pillow.process_change(doc_to_change({
                '_id': uuid.uuid4().hex,
                'doc_type': 'CommCareCase',
                'domain': 'user-reports',
                'type': 'ticket',
                'priority': bad_value
            }))
        self.adapter.refresh_table()
        # make sure we saved rows to the table for everything
        self.assertEqual(len(bad_ints), self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    def test_basic_doc_processing(self, datetime_mock):
        """Happy path: a matching doc yields the expected indicator row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.process_change(doc_to_change(sample_doc))
        self._check_sample_doc_state(expected_indicators)

    @patch('corehq.apps.userreports.specs.datetime')
    def test_not_relevant_to_domain(self, datetime_mock):
        """Docs from a foreign domain produce no rows."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        sample_doc['domain'] = 'not-this-domain'
        self.pillow.process_change(doc_to_change(sample_doc))
        self.adapter.refresh_table()
        self.assertEqual(0, self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    def test_process_doc_from_couch(self, datetime_mock):
        """Full pipeline: couch save -> kafka publish -> pillow -> table row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)

        # make sure case is in DB
        case = CommCareCase.wrap(sample_doc)
        with drop_connected_signals(case_post_save):
            case.save()

        # send to kafka
        since = self.pillow.get_change_feed().get_latest_offsets()
        producer.send_change(topics.CASE, doc_to_change(sample_doc).metadata)

        # run pillow and check changes
        self.pillow.process_changes(since=since, forever=False)
        self._check_sample_doc_state(expected_indicators)
        case.delete()
class IndicatorPillowTest(IndicatorPillowTestBase):
    """UCR pillow tests run against all UCR backends.

    Bootstraps a fresh kafka UCR pillow per test; each test is decorated with
    run_with_all_ucr_backends so assertions hold on every backend.
    """

    @softer_assert()
    def setUp(self):
        super(IndicatorPillowTest, self).setUp()
        self.pillow = get_kafka_ucr_pillow()
        self.pillow.bootstrap(configs=[self.config])
        # surface kafka-unavailable as a setup problem, not a test failure
        with trap_extra_setup(KafkaUnavailableError):
            self.pillow.get_change_feed().get_current_offsets()

    @run_with_all_ucr_backends
    def test_stale_rebuild(self):
        """Rebuilding from a config whose _rev is stale must raise."""
        later_config = copy(self.config)
        later_config.save()
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.rebuild_table(get_indicator_adapter(self.config))

    @patch('corehq.apps.userreports.specs.datetime')
    @run_with_all_ucr_backends
    def test_change_transport(self, datetime_mock):
        """A processed change produces the expected indicator row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(
            self.fake_time_now)
        self.pillow.process_change(doc_to_change(sample_doc))
        self._check_sample_doc_state(expected_indicators)

    @patch('corehq.apps.userreports.specs.datetime')
    @run_with_all_ucr_backends
    def test_rebuild_indicators(self, datetime_mock):
        """rebuild_indicators() repopulates the table from docs already in couch."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.config.save()
        sample_doc, expected_indicators = get_sample_doc_and_indicators(
            self.fake_time_now)
        CommCareCase.get_db().save_doc(sample_doc)
        self.addCleanup(lambda id: CommCareCase.get_db().delete_doc(id), sample_doc['_id'])
        rebuild_indicators(self.config._id)
        self._check_sample_doc_state(expected_indicators)

    @run_with_all_ucr_backends
    def test_bad_integer_datatype(self):
        """Docs with unparseable integer values still get a row each."""
        self.config.save()
        bad_ints = ['a', '', None]
        for bad_value in bad_ints:
            self.pillow.process_change(
                doc_to_change({
                    '_id': uuid.uuid4().hex,
                    'doc_type': 'CommCareCase',
                    'domain': 'user-reports',
                    'type': 'ticket',
                    'priority': bad_value
                }))
        self.adapter.refresh_table()
        # make sure we saved rows to the table for everything
        self.assertEqual(len(bad_ints), self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    @run_with_all_ucr_backends
    def test_basic_doc_processing(self, datetime_mock):
        """Happy path: a matching doc yields the expected indicator row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(
            self.fake_time_now)
        self.pillow.process_change(doc_to_change(sample_doc))
        self._check_sample_doc_state(expected_indicators)

    @patch('corehq.apps.userreports.specs.datetime')
    @run_with_all_ucr_backends
    def test_process_doc_from_couch(self, datetime_mock):
        """Full pipeline: couch save -> kafka publish -> pillow -> table row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, expected_indicators = get_sample_doc_and_indicators(
            self.fake_time_now)

        # make sure case is in DB
        case = CommCareCase.wrap(sample_doc)
        with drop_connected_signals(case_post_save):
            case.save()

        # send to kafka
        since = self.pillow.get_change_feed().get_current_offsets()
        producer.send_change(topics.CASE, doc_to_change(sample_doc).metadata)

        # run pillow and check changes
        self.pillow.process_changes(since=since, forever=False)
        self._check_sample_doc_state(expected_indicators)
        case.delete()
def test_basic_doc_processing(self, datetime_mock):
    """The happy path: a matching doc produces the expected indicator row."""
    datetime_mock.utcnow.return_value = self.fake_time_now
    doc, expected = get_sample_doc_and_indicators(self.fake_time_now)
    self.pillow.process_change(doc_to_change(doc))
    self._check_sample_doc_state(expected)
def _process_docs(self, docs):
    """Run every doc in *docs* through a case pillow built for this datasource."""
    pillow = get_case_pillow(ucr_configs=[self.data_source])
    for change in (doc_to_change(doc) for doc in docs):
        pillow.process_change(change)
def test_not_relevant_to_domain(self, datetime_mock):
    """A doc belonging to a different domain produces no indicator rows."""
    datetime_mock.utcnow.return_value = self.fake_time_now
    off_domain_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
    off_domain_doc['domain'] = 'not-this-domain'

    self.pillow.process_change(doc_to_change(off_domain_doc))
    self.assertEqual(0, self.adapter.get_query_object().count())
def _process_docs(self, docs):
    """Bootstrap a kafka UCR pillow on this datasource and feed it *docs*."""
    pillow = get_kafka_ucr_pillow()
    pillow.bootstrap(configs=[self.data_source])
    for raw_doc in docs:
        pillow.process_change(doc_to_change(raw_doc))