Example #1
    def test_add_index(self):
        # build the table without an index
        self._setup_data_source('add_index')

        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(len(insp.get_indexes(table_name)), 0)

        # add the index to the config
        config = self._get_config('add_index')
        self.addCleanup(config.delete)
        config.configured_indicators[0]['create_index'] = True
        config.save()
        adapter = get_indicator_adapter(config)

        with mock.patch('corehq.apps.userreports.pillow_utils.rebuild_table') as rebuild_table, \
                mock.patch('corehq.apps.userreports.pillow_utils.migrate_tables_with_logging') as migrate_table:
            get_case_pillow(ucr_configs=[config])
            self.assertFalse(rebuild_table.called)
            self.assertTrue(migrate_table.called)

        engine = adapter.engine
        insp = reflection.Inspector.from_engine(engine)
        # note the index is not yet created
        self.assertEqual(len(insp.get_indexes(table_name)), 0)
Example #2
 def test_reuse_cache_chunked(self):
     pillow1 = get_case_pillow(topics=['case-sql'],
                               processor_chunk_size=100,
                               ucr_configs=self.configs[:1])
     pillow2 = get_case_pillow(topics=['case-sql'],
                               processor_chunk_size=100,
                               ucr_configs=self.configs)
     self._test_reuse_cache(pillow1, pillow2, 11)
Example #3
    def setUp(self):
        config1 = get_data_source_with_related_doc_type()
        config1.save()
        config2 = get_data_source_with_related_doc_type()
        config2.table_id = 'other-config'
        config2.save()
        self.configs = [config1, config2]
        self.adapters = [get_indicator_adapter(c) for c in self.configs]

        # one pillow has only the first config, the other has both configs
        self.pillow1 = get_case_pillow(topics=['case-sql'], ucr_configs=[config1], processor_chunk_size=0)
        self.pillow2 = get_case_pillow(topics=['case-sql'], ucr_configs=self.configs, processor_chunk_size=0)

        self.pillow1.get_change_feed().get_latest_offsets()
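
These setUp excerpts omit cleanup. A minimal tearDown sketch for this fixture, assuming the adapters expose drop_table() and the configs expose delete() as they do elsewhere in these examples, might look like:

    def tearDown(self):
        # hypothetical cleanup mirroring the setUp above; drop_table()/delete()
        # are assumed to be available on the adapter and config objects
        for adapter in self.adapters:
            adapter.drop_table()
        for config in self.configs:
            config.delete()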
Example #4
    def setUp(self):
        config1 = get_data_source_with_related_doc_type()
        config1.save()
        config2 = get_data_source_with_related_doc_type()
        config2.table_id = 'other-config'
        config2.save()
        self.configs = [config1, config2]
        self.adapters = [get_indicator_adapter(c) for c in self.configs]

        # one pillow has only the first config, the other has both configs
        self.pillow1 = get_case_pillow(topics=['case-sql'], ucr_configs=[config1], processor_chunk_size=0)
        self.pillow2 = get_case_pillow(topics=['case-sql'], ucr_configs=self.configs, processor_chunk_size=0)

        self.pillow1.get_change_feed().get_latest_offsets()
Example #5
    def test_add_non_nullable_column(self):
        self._setup_data_source('add_non_nullable_col')

        # assert the new_date column isn't in the table yet
        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 0)

        # add the column to the config
        config = self._get_config('add_non_nullable_col')
        self.addCleanup(config.delete)
        config.configured_indicators.append({
            "column_id": "new_date",
            "type": "raw",
            "display_name": "new_date opened",
            "datatype": "datetime",
            "property_name": "other_opened_on",
            "is_nullable": False
        })
        config.save()
        adapter = get_indicator_adapter(config)
        engine = adapter.engine

        # mock rebuild_table to verify that a rebuild is triggered (the table isn't actually rebuilt)
        with mock.patch(
                'corehq.apps.userreports.pillow.ConfigurableReportPillowProcessor.rebuild_table'
        ):
            pillow = get_case_pillow(ucr_configs=[config])
            self.assertTrue(pillow.processors[0].rebuild_table.called)
        # column doesn't exist because rebuild_table was mocked
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 0)

        # Bootstrap again without the mock to ensure the column is actually created
        pillow = get_case_pillow(ucr_configs=[config])
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 1)
Example #6
    def test_add_index(self):
        # build the table without an index
        self._setup_data_source('add_index')

        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(len(insp.get_indexes(table_name)), 0)

        # add the index to the config
        config = self._get_config('add_index')
        self.addCleanup(config.delete)
        config.configured_indicators[0]['create_index'] = True
        config.save()
        adapter = get_indicator_adapter(config)

        # mock rebuild_table to ensure the table isn't rebuilt when adding the index
        pillow = get_case_pillow(ucr_configs=[config])
        pillow.processors[0].rebuild_table = mock.MagicMock()
        pillow.processors[0].bootstrap([config])

        self.assertFalse(pillow.processors[0].rebuild_table.called)
        engine = adapter.engine
        insp = reflection.Inspector.from_engine(engine)
        # note the index is not yet created
        self.assertEqual(len(insp.get_indexes(table_name)), 0)
Example #7
    def setUp(self):
        self.config = get_data_source_with_related_doc_type()
        self.config.save()
        self.pillow = get_case_pillow(topics=['case-sql'], ucr_configs=[self.config], processor_chunk_size=0)
        self.adapter = get_indicator_adapter(self.config)

        self.pillow.get_change_feed().get_latest_offsets()
Example #8
    def setUp(self):
        self.config = get_data_source_with_related_doc_type()
        self.config.save()
        self.pillow = get_case_pillow(topics=['case-sql'], ucr_configs=[self.config], processor_chunk_size=0)
        self.adapter = get_indicator_adapter(self.config)

        self.pillow.get_change_feed().get_latest_offsets()
Example #9
    def test_add_nullable_column(self):
        self._setup_data_source('add_nullable_col')

        # assert the new_date column isn't in the table yet
        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(
            len([c for c in insp.get_columns(table_name) if c['name'] == 'new_date']), 0
        )

        # add the column to the config
        config = self._get_config('add_nullable_col')
        self.addCleanup(config.delete)
        config.configured_indicators.append({
            "column_id": "new_date",
            "type": "raw",
            "display_name": "new_date opened",
            "datatype": "datetime",
            "property_name": "other_opened_on",
            "is_nullable": True
        })
        config.save()
        adapter = get_indicator_adapter(config)
        engine = adapter.engine

        # mock rebuild_table and re-bootstrap to ensure the column is added
        # without rebuilding the table
        pillow = get_case_pillow(ucr_configs=[config])
        pillow.processors[0].rebuild_table = mock.MagicMock()
        pillow.processors[0].bootstrap([config])
        self.assertFalse(pillow.processors[0].rebuild_table.called)
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(
            len([c for c in insp.get_columns(table_name) if c['name'] == 'new_date']), 1
        )
Example #10
def _get_pillow(configs, processor_chunk_size=0):
    pillow = get_case_pillow(processor_chunk_size=processor_chunk_size)
    # overwrite processors since we're only concerned with UCR here
    ucr_processor = ConfigurableReportPillowProcessor(data_source_providers=[])
    ucr_processor.bootstrap(configs)
    pillow.processors = [ucr_processor]
    return pillow
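
A rough usage sketch for this helper (illustrative only; it reuses get_sample_doc_and_indicators and doc_to_change from the other examples and assumes the configs are already saved):

def _process_sample_doc(configs):
    # hypothetical driver: push one change through the UCR-only pillow
    pillow = _get_pillow(configs)
    sample_doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(sample_doc))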
Example #11
def _get_pillow(configs, processor_chunk_size=0):
    pillow = get_case_pillow(processor_chunk_size=processor_chunk_size)
    # overwrite processors since we're only concerned with UCR here
    ucr_processor = ConfigurableReportPillowProcessor(data_source_providers=[])
    ucr_processor.bootstrap(configs)
    pillow.processors = [ucr_processor]
    return pillow
Example #12
 def setUpClass(cls):
     super(AsyncIndicatorTest, cls).setUpClass()
     cls.config = get_data_source_with_related_doc_type()
     cls.config.asynchronous = True
     cls.config.save()
     cls.adapter = get_indicator_adapter(cls.config)
     cls.pillow = get_case_pillow(ucr_configs=[cls.config])
     cls.pillow.get_change_feed().get_latest_offsets()
Example #13
 def setUpClass(cls):
     super(AsyncIndicatorTest, cls).setUpClass()
     cls.config = get_data_source_with_related_doc_type()
     cls.config.asynchronous = True
     cls.config.save()
     cls.adapter = get_indicator_adapter(cls.config)
     cls.pillow = get_case_pillow(ucr_configs=[cls.config])
     cls.pillow.get_change_feed().get_latest_offsets()
Example #14
 def setUpClass(cls):
     super(ChunkedUCRProcessorTest, cls).setUpClass()
     cls.config = get_sample_data_source()
     cls.config.save()
     cls.adapter = get_indicator_adapter(cls.config)
     cls.adapter.build_table()
     cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
     cls.pillow = get_case_pillow(processor_chunk_size=100, ucr_configs=[cls.config])
Example #15
 def setUpClass(cls):
     super(ChunkedUCRProcessorTest, cls).setUpClass()
     cls.config = get_sample_data_source()
     cls.config.save()
     cls.adapter = get_indicator_adapter(cls.config)
     cls.adapter.build_table()
     cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
     cls.pillow = get_case_pillow(processor_chunk_size=100, ucr_configs=[cls.config])
Example #16
    def setUp(self):
        super(CaseSearchPillowTest, self).setUp()
        FormProcessorTestUtils.delete_all_cases()
        self.elasticsearch = get_es_new()
        self.pillow = get_case_pillow(skip_ucr=True)
        ensure_index_deleted(CASE_SEARCH_INDEX)

        # Bootstrap ES
        initialize_index_and_mapping(get_es_new(), CASE_SEARCH_INDEX_INFO)
Example #17
    def setUp(self):
        super(CaseSearchPillowTest, self).setUp()
        FormProcessorTestUtils.delete_all_cases()
        self.elasticsearch = get_es_new()
        self.pillow = get_case_pillow(skip_ucr=True)
        ensure_index_deleted(CASE_SEARCH_INDEX)

        # Bootstrap ES
        initialize_index_and_mapping(get_es_new(), CASE_SEARCH_INDEX_INFO)
Example #18
    def test_pillow_save_to_one_database_at_a_time(self):
        pillow = get_case_pillow(ucr_configs=[self.ds_1])

        sample_doc, _ = get_sample_doc_and_indicators()
        pillow.process_change(doc_to_change(sample_doc))

        self.assertEqual(1, self.ds1_adapter.get_query_object().count())
        self.assertEqual(0, self.ds2_adapter.get_query_object().count())

        # save to the other
        pillow = get_case_pillow(ucr_configs=[self.ds_2])
        orig_id = sample_doc['_id']
        sample_doc['_id'] = uuid.uuid4().hex
        pillow.process_change(doc_to_change(sample_doc))
        self.assertEqual(1, self.ds1_adapter.get_query_object().count())
        self.assertEqual(1, self.ds2_adapter.get_query_object().count())
        self.assertEqual(1, self.ds1_adapter.get_query_object().filter_by(doc_id=orig_id).count())
        self.assertEqual(1, self.ds2_adapter.get_query_object().filter_by(doc_id=sample_doc['_id']).count())
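
The ds_1/ds_2 fixture is not included in these excerpts. A plausible setUp, reconstructed from the calls used above (the engine ids and table id here are assumptions, not the projects' actual values), could be:

    def setUp(self):
        # hypothetical fixture: two data sources pointed at different engines
        self.ds_1 = get_sample_data_source()
        self.ds_1.engine_id = 'engine-1'  # assumed engine id
        self.ds_1.save()
        self.ds_2 = get_sample_data_source()
        self.ds_2.table_id = 'other-table'  # assumed table id
        self.ds_2.engine_id = 'engine-2'  # assumed engine id
        self.ds_2.save()
        self.ds1_adapter = get_indicator_adapter(self.ds_1)
        self.ds2_adapter = get_indicator_adapter(self.ds_2)
        self.ds1_adapter.build_table()
        self.ds2_adapter.build_table()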
Example #19
 def test_pillow_save_to_multiple_databases(self):
     self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
     pillow = get_case_pillow(ucr_configs=[self.ds_1, self.ds_2])
     self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
     sample_doc, _ = get_sample_doc_and_indicators()
     pillow.process_change(doc_to_change(sample_doc))
     self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
     self.assertEqual(1, self.ds1_adapter.get_query_object().count())
     self.assertEqual(1, self.ds2_adapter.get_query_object().count())
Example #20
    def test_pillow_save_to_one_database_at_a_time(self):
        pillow = get_case_pillow(ucr_configs=[self.ds_1])

        sample_doc, _ = get_sample_doc_and_indicators()
        pillow.process_change(doc_to_change(sample_doc))

        self.assertEqual(1, self.ds1_adapter.get_query_object().count())
        self.assertEqual(0, self.ds2_adapter.get_query_object().count())

        # save to the other
        pillow = get_case_pillow(ucr_configs=[self.ds_2])
        orig_id = sample_doc['_id']
        sample_doc['_id'] = uuid.uuid4().hex
        pillow.process_change(doc_to_change(sample_doc))
        self.assertEqual(1, self.ds1_adapter.get_query_object().count())
        self.assertEqual(1, self.ds2_adapter.get_query_object().count())
        self.assertEqual(1, self.ds1_adapter.get_query_object().filter_by(doc_id=orig_id).count())
        self.assertEqual(1, self.ds2_adapter.get_query_object().filter_by(doc_id=sample_doc['_id']).count())
Example #21
 def test_pillow_save_to_multiple_databases(self):
     self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
     pillow = get_case_pillow(ucr_configs=[self.ds_1, self.ds_2])
     self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
     sample_doc, _ = get_sample_doc_and_indicators()
     pillow.process_change(doc_to_change(sample_doc))
     self.assertNotEqual(self.ds1_adapter.engine.url, self.ds2_adapter.engine.url)
     self.assertEqual(1, self.ds1_adapter.get_query_object().count())
     self.assertEqual(1, self.ds2_adapter.get_query_object().count())
Example #22
    def test_skip_destructive_rebuild(self):
        self.config = self._get_config('add_non_nullable_col')
        self.config.disable_destructive_rebuild = True
        self.config.save()

        get_case_pillow(ucr_configs=[self.config])
        self.adapter = get_indicator_adapter(self.config)
        self.engine = self.adapter.engine

        # assert the new_date column isn't in the table yet
        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 0)

        # add the column to the config
        self.config.configured_indicators.append({
            "column_id": "new_date",
            "type": "raw",
            "display_name": "new_date opened",
            "datatype": "datetime",
            "property_name": "other_opened_on",
            "is_nullable": False
        })
        self.config.save()

        # re-fetch from DB to bust object caches
        self.config = DataSourceConfiguration.get(self.config.data_source_id)

        # bootstrap to trigger rebuild
        get_case_pillow(ucr_configs=[self.config])

        logs = DataSourceActionLog.objects.filter(
            indicator_config_id=self.config.data_source_id,
            skip_destructive=True)
        self.assertEqual(1, len(logs))
        self.assertEqual(logs[0].migration_diffs, [{
            'type': 'add_column',
            'item_name': 'new_date'
        }])
Example #23
    def test_add_non_nullable_column(self):
        self._setup_data_source('add_non_nullable_col')

        # assert the new_date column isn't in the table yet
        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(
            len([c for c in insp.get_columns(table_name) if c['name'] == 'new_date']), 0
        )

        # add the column to the config
        config = self._get_config('add_non_nullable_col')
        self.addCleanup(config.delete)
        config.configured_indicators.append({
            "column_id": "new_date",
            "type": "raw",
            "display_name": "new_date opened",
            "datatype": "datetime",
            "property_name": "other_opened_on",
            "is_nullable": False
        })
        config.save()
        adapter = get_indicator_adapter(config)
        engine = adapter.engine

        # mock rebuild_table to verify that a rebuild is triggered (the table isn't actually rebuilt)
        with mock.patch('corehq.apps.userreports.pillow.ConfigurableReportPillowProcessor.rebuild_table'):
            pillow = get_case_pillow(ucr_configs=[config])
            self.assertTrue(pillow.processors[0].rebuild_table.called)
        # column doesn't exist because rebuild_table was mocked
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(
            len([c for c in insp.get_columns(table_name) if c['name'] == 'new_date']), 0
        )

        # Bootstrap again without the mock to ensure the column is actually created
        pillow = get_case_pillow(ucr_configs=[config])
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(
            len([c for c in insp.get_columns(table_name) if c['name'] == 'new_date']), 1
        )
Example #24
    def test_ordered_pk(self):
        self._setup_data_source('ordered_pk')
        config = self._get_config('ordered_pk')
        config.configured_indicators.append({
            "column_id": "pk_key",
            "type": "raw",
            "datatype": "string",
            "property_name": "owner_id",
            "is_primary_key": True
        })
        config.sql_settings.primary_key = ['pk_key', 'doc_id']
        config.save()

        get_case_pillow(ucr_configs=[config])
        adapter = get_indicator_adapter(config)
        engine = adapter.engine
        insp = reflection.Inspector.from_engine(engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        pk = insp.get_pk_constraint(table_name)
        expected_pk = ['pk_key', 'doc_id']
        self.assertEqual(expected_pk, pk['constrained_columns'])
Example #25
    def test_add_nullable_column(self):
        self._setup_data_source('add_nullable_col')

        # assert the new_date column isn't in the table yet
        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 0)

        # add the column to the config
        config = self._get_config('add_nullable_col')
        self.addCleanup(config.delete)
        config.configured_indicators.append({
            "column_id": "new_date",
            "type": "raw",
            "display_name": "new_date opened",
            "datatype": "datetime",
            "property_name": "other_opened_on",
            "is_nullable": True
        })
        config.save()
        adapter = get_indicator_adapter(config)
        engine = adapter.engine

        # mock rebuild_table to ensure the column is added without rebuilding the table
        with mock.patch('corehq.apps.userreports.pillow_utils.rebuild_table') as rebuild_table:
            get_case_pillow(ucr_configs=[config])
            self.assertFalse(rebuild_table.called)
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 1)
Example #26
    def _add_rows(self, rows):
        pillow = get_case_pillow(ucr_configs=[self.data_source])

        def _get_case(row):
            return {
                '_id': uuid.uuid4().hex,
                'domain': self.domain,
                'doc_type': 'CommCareCase',
                'type': 'city',
                'name': row.name,
                'number': row.number,
                'just_for_sorting': row.sort_key,
            }
        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))
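
The row objects here only need name, number, and sort_key attributes; a hypothetical namedtuple that satisfies _add_rows:

    # hypothetical row type (requires: from collections import namedtuple)
    Row = namedtuple('Row', ['name', 'number', 'sort_key'])
    # e.g. self._add_rows([Row(name='a', number=1, sort_key=2), Row(name='b', number=2, sort_key=1)])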
Example #27
    def test_mirroring(self):
        ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
        ds3.engine_id = "default"
        ds3.mirrored_engine_ids = ['engine-2']
        adapter = get_indicator_adapter(ds3)
        self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
        self.assertEqual(len(adapter.all_adapters), 2)
        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(0, session.query(db_adapter.get_table()).count())

        with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
            pillow = get_case_pillow(ucr_configs=[ds3])
        sample_doc, _ = get_sample_doc_and_indicators()
        pillow.process_change(doc_to_change(sample_doc))

        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(1, session.query(db_adapter.get_table()).count())
Example #28
    def test_mirroring(self):
        ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
        ds3.engine_id = DEFAULT_ENGINE_ID
        ds3.mirrored_engine_ids = ['engine-2']
        adapter = get_indicator_adapter(ds3)
        self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
        self.assertEqual(len(adapter.all_adapters), 2)
        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(0, session.query(db_adapter.get_table()).count())

        with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
            pillow = get_case_pillow(ucr_configs=[ds3])
        sample_doc, _ = get_sample_doc_and_indicators()
        pillow.process_change(doc_to_change(sample_doc))

        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(1, session.query(db_adapter.get_table()).count())
Example #29
    def test_add_index(self):
        # build the table without an index
        self._setup_data_source('add_index')

        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(len(insp.get_indexes(table_name)), 0)

        # add the index to the config
        config = self._get_config('add_index')
        self.addCleanup(config.delete)
        config.configured_indicators[0]['create_index'] = True
        config.save()
        adapter = get_indicator_adapter(config)

        # mock rebuild_table to ensure the table isn't rebuilt when adding the index
        pillow = get_case_pillow(ucr_configs=[config])
        pillow.processors[0].rebuild_table = mock.MagicMock()
        pillow.processors[0].bootstrap([config])

        self.assertFalse(pillow.processors[0].rebuild_table.called)
        engine = adapter.engine
        insp = reflection.Inspector.from_engine(engine)
        self.assertEqual(len(insp.get_indexes(table_name)), 1)
Example #30
 def test_process_doc_from_couch_chunked(self, datetime_mock):
     self._test_process_doc_from_couch(
         datetime_mock,
         get_case_pillow(processor_chunk_size=100,
                         ucr_configs=[self.config]))
Example #31
 def setUpClass(cls):
     super(ChunkedAsyncIndicatorTest, cls).setUpClass()
     cls.pillow = get_case_pillow(processor_chunk_size=100,
                                  ucr_configs=[cls.config])
Example #32
    def _process_docs(self, docs):
        pillow = get_case_pillow(ucr_configs=[self.data_source])

        for doc in docs:
            pillow.process_change(doc_to_change(doc))
Example #33
 def test_process_doc_from_sql_stale_chunked(self):
     pillow = get_case_pillow(topics=['case-sql'],
                              processor_chunk_size=100,
                              ucr_configs=[self.config])
     # one less query in chunked mode, as the two cases are looked up in a single query
     self._test_process_doc_from_sql_stale(pillow, num_queries=11)
Example #34
 def setUp(self):
     with patch('pillowtop.checkpoints.manager.get_or_create_checkpoint'):
         self.pillow = get_case_pillow(skip_ucr=True)
Example #35
 def _setup_data_source(self, extra_id):
     self.config = self._get_config(extra_id)
     self.config.save()
     get_case_pillow(ucr_configs=[self.config])
     self.adapter = get_indicator_adapter(self.config)
     self.engine = self.adapter.engine
Example #36
    def _process_docs(self, docs):
        pillow = get_case_pillow(ucr_configs=[self.data_source])

        for doc in docs:
            pillow.process_change(doc_to_change(doc))
Example #37
 def setUpClass(cls):
     super(ChunkedAsyncIndicatorTest, cls).setUpClass()
     cls.pillow = get_case_pillow(processor_chunk_size=100, ucr_configs=[cls.config])
Example #38
 def test_process_doc_from_couch_chunked(self, datetime_mock):
     self._test_process_doc_from_couch(datetime_mock,
         get_case_pillow(processor_chunk_size=100, ucr_configs=[self.config]))
Example #39
 def test_reuse_cache_chunked(self):
     pillow1 = get_case_pillow(topics=['case-sql'], processor_chunk_size=100, ucr_configs=self.configs[:1])
     pillow2 = get_case_pillow(topics=['case-sql'], processor_chunk_size=100, ucr_configs=self.configs)
     self._test_reuse_cache(pillow1, pillow2, 11)
Example #40
 def test_process_deleted_doc_from_sql_chunked(self, datetime_mock):
     self.pillow = get_case_pillow(processor_chunk_size=100, ucr_configs=[self.config])
     self._test_process_deleted_doc_from_sql(datetime_mock)
     self.pillow = get_case_pillow(processor_chunk_size=0, ucr_configs=[self.config])
Example #41
 def test_process_doc_from_sql_stale_chunked(self):
     pillow = get_case_pillow(topics=['case-sql'], processor_chunk_size=100, ucr_configs=[self.config])
     # one less query in chunked mode, as the two cases are looked up in a single query
     self._test_process_doc_from_sql_stale(pillow, num_queries=11)
Example #42
 def test_process_deleted_doc_from_sql_chunked(self, datetime_mock):
     self.pillow = get_case_pillow(processor_chunk_size=100,
                                   ucr_configs=[self.config])
     self._test_process_deleted_doc_from_sql(datetime_mock)
     self.pillow = get_case_pillow(processor_chunk_size=0,
                                   ucr_configs=[self.config])
Example #43
 def setUp(self):
     with patch('pillowtop.checkpoints.manager.get_or_create_checkpoint'):
         self.pillow = get_case_pillow(skip_ucr=True)
Example #44
 def _setup_data_source(self, extra_id):
     self.config = self._get_config(extra_id)
     self.config.save()
     get_case_pillow(ucr_configs=[self.config])
     self.adapter = get_indicator_adapter(self.config)
     self.engine = self.adapter.engine
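
_get_config is referenced throughout these examples but never shown. A plausible sketch, assuming it derives a uniquely named copy of the sample data source (the real implementation in these projects may differ), is:

 def _get_config(self, extra_id):
     # hypothetical: copy the sample data source and make its table_id unique
     config = get_sample_data_source()
     config.table_id = config.table_id + '_' + extra_id
     return config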