def test_add_index(self):
    """Enabling create_index on an indicator should add the index in place,
    without triggering a table rebuild."""
    # First bootstrap the table with no index configured.
    config = get_sample_data_source()
    config.save()
    self.addCleanup(config.delete)
    pillow = get_kafka_ucr_pillow()
    pillow.bootstrap([config])
    adapter = get_indicator_adapter(config)
    inspector = reflection.Inspector.from_engine(adapter.engine)
    table_name = get_table_name(config.domain, config.table_id)
    self.assertEqual(0, len(inspector.get_indexes(table_name)))
    # Now enable the index on a fresh copy of the config.
    config = get_sample_data_source()
    self.addCleanup(config.delete)
    config.configured_indicators[0]['create_index'] = True
    config.save()
    adapter = get_indicator_adapter(config)
    # Mock rebuild_table to prove bootstrapping only adds the index and
    # never rebuilds the table.
    pillow = get_kafka_ucr_pillow()
    pillow.processors[0].rebuild_table = MagicMock()
    pillow.bootstrap([config])
    self.assertFalse(pillow.processors[0].rebuild_table.called)
    inspector = reflection.Inspector.from_engine(adapter.engine)
    self.assertEqual(1, len(inspector.get_indexes(table_name)))
def test_complete_integration(self):
    """bootstrap_if_needed should pick up data sources saved after the
    initial bootstrap and merge them into the per-domain adapter map."""
    # initialize pillow with one data source
    data_source_1 = get_sample_data_source()
    data_source_1.save()
    ds_1_domain = data_source_1.domain
    table_manager = ConfigurableReportTableManagerMixin(
        [DynamicDataSourceProvider()])
    table_manager.bootstrap()
    self.assertEqual(1, len(table_manager.table_adapters_by_domain))
    self.assertEqual(
        1, len(table_manager.table_adapters_by_domain[ds_1_domain]))
    self.assertEqual(
        data_source_1._id,
        table_manager.table_adapters_by_domain[ds_1_domain][0].config._id)
    data_source_2 = self._copy_data_source(data_source_1)
    data_source_2.save()
    self.assertFalse(table_manager.needs_bootstrap())
    # should call _pull_in_new_and_modified_data_sources
    table_manager.bootstrap_if_needed()
    self.assertEqual(1, len(table_manager.table_adapters_by_domain))
    self.assertEqual(
        2, len(table_manager.table_adapters_by_domain[ds_1_domain]))
    self.assertEqual(
        {data_source_1._id, data_source_2._id},
        # set comprehension instead of set([...]) (flake8-comprehensions C403)
        {
            table_adapter.config._id
            for table_adapter in table_manager.table_adapters_by_domain[ds_1_domain]
        })
def test_linked_reports_updated(self):
    """Pulling an app update should remap report module ids to the linked report."""
    # create a report backed by a data source on the master domain
    master_data_source = get_sample_data_source()
    master_data_source.domain = self.domain
    master_data_source.save()
    master_report = get_sample_report_config()
    master_report.config_id = master_data_source.get_id
    master_report.domain = self.domain
    master_report.save()
    reports_module = self.master1.add_module(
        ReportModule.new_module('Reports', None))
    reports_module.report_configs = [
        ReportAppConfig(report_id=master_report.get_id, header={'en': 'CommBugz'}),
    ]
    # link the report to the downstream domain, then pull the app update
    link_info = create_linked_ucr(self.domain_link, master_report.get_id)
    updated_app = update_linked_app(self.linked_app, self.master1, 'a-user-id')
    # the module's report config should now point at the linked report's id
    self.assertEqual(
        updated_app.modules[0].report_configs[0].report_id,
        link_info.report.get_id)
def setUpClass(cls):
    """Point 'engine-2' at a second database and create one data source per engine."""
    # the second database shares every connection parameter with the
    # reporting DB except the database name
    cls.db2_name = 'cchq_ucr_tests'
    url_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
    url_parts[-1] = cls.db2_name
    cls.db2_url = '/'.join(url_parts)

    # patch connection-string lookup so each engine id resolves to its own DB
    cls.connection_string_patch = patch(
        'corehq.sql_db.connections.connection_manager.get_connection_string')

    def connection_string_for_engine(engine_id):
        if engine_id == 'engine-1':
            return settings.SQL_REPORTING_DATABASE_URL
        return cls.db2_url

    mock_manager = cls.connection_string_patch.start()
    mock_manager.side_effect = connection_string_for_engine

    # two copies of the sample data source, one per engine
    template = get_sample_data_source()
    cls.ds_1 = DataSourceConfiguration.wrap(template.to_json())
    cls.ds_1.engine_id = 'engine-1'
    cls.ds_1.save()
    cls.ds_2 = DataSourceConfiguration.wrap(template.to_json())
    cls.ds_2.engine_id = 'engine-2'
    cls.ds_2.save()

    # create the scratch database; exited during class teardown
    cls.db_context = temporary_database(cls.db2_name)
    cls.db_context.__enter__()

    cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
    cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
def setUpClass(cls):
    """Route 'engine-1' at the default DB and 'engine-2' at a scratch database."""
    super(UCRMultiDBTest, cls).setUpClass()
    cls.db2_name = 'cchq_ucr_tests'
    default_db_url = connections.connection_manager.get_connection_string(DEFAULT_DB_ALIAS)
    url_parts = default_db_url.split('/')
    url_parts[-1] = cls.db2_name
    cls.db2_url = '/'.join(url_parts)

    # one ExitStack collects every override and the scratch DB so class
    # teardown is a single close()
    cls.context_managers = ExitStack()
    cls.context_managers.enter_context(
        connections.override_engine('engine-1', default_db_url, 'default'))
    cls.context_managers.enter_context(
        connections.override_engine('engine-2', cls.db2_url, cls.db2_name))

    # identical sample data sources, one per engine
    template = get_sample_data_source()
    cls.ds_1 = DataSourceConfiguration.wrap(template.to_json())
    cls.ds_1.engine_id = 'engine-1'
    cls.ds_1.save()
    cls.ds_2 = DataSourceConfiguration.wrap(template.to_json())
    cls.ds_2.engine_id = 'engine-2'
    cls.ds_2.save()

    cls.context_managers.enter_context(temporary_database(cls.db2_name))
    cls.ds1_adapter = get_indicator_adapter(cls.ds_1)
    cls.ds2_adapter = get_indicator_adapter(cls.ds_2)
def setUp(self):
    # Saved sample data source backing the pillow under test.
    self.config = get_sample_data_source()
    self.config.save()
    self.pillow = ConfigurableIndicatorPillow()
    self.pillow.bootstrap(configs=[self.config])
    self.adapter = IndicatorSqlAdapter(self.config)
    # Fixed timestamp so time-sensitive assertions are deterministic.
    self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
def setUpClass(cls):
    super(ChunkedUCRProcessorTest, cls).setUpClass()
    # Saved sample data source with its table built once for the whole class.
    cls.config = get_sample_data_source()
    cls.config.save()
    cls.adapter = get_indicator_adapter(cls.config)
    cls.adapter.build_table()
    # Fixed timestamp so time-sensitive assertions are deterministic.
    cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
    # Chunked processing path: changes are batched 100 at a time.
    cls.pillow = get_case_pillow(processor_chunk_size=100, ucr_configs=[cls.config])
def setUpClass(cls):
    super(IndicatorPillowTest, cls).setUpClass()
    # Saved sample data source with its table built once for the whole class.
    cls.config = get_sample_data_source()
    cls.config.save()
    cls.adapter = get_indicator_adapter(cls.config)
    cls.adapter.build_table()
    # Fixed timestamp so time-sensitive assertions are deterministic.
    cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
    cls.pillow = get_kafka_ucr_pillow()
def setUpClass(cls):
    super(IndicatorPillowTest, cls).setUpClass()
    # Saved sample data source with its table built once for the whole class.
    cls.config = get_sample_data_source()
    cls.config.save()
    cls.adapter = get_indicator_adapter(cls.config)
    cls.adapter.build_table()
    # Fixed timestamp so time-sensitive assertions are deterministic.
    cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
    # chunk size 0 exercises the unchunked (one-change-at-a-time) path
    cls.pillow = get_case_pillow(processor_chunk_size=0, ucr_configs=[cls.config])
def setUpClass(cls):
    super(ChunkedUCRProcessorTest, cls).setUpClass()
    # Saved sample data source with its table built once for the whole class.
    cls.config = get_sample_data_source()
    cls.config.save()
    cls.adapter = get_indicator_adapter(cls.config)
    cls.adapter.build_table()
    # Fixed timestamp so time-sensitive assertions are deterministic.
    cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
    # Chunked processing path: changes are batched 100 at a time.
    cls.pillow = _get_pillow([cls.config], processor_chunk_size=100)
def setUpClass(cls):
    super(IndicatorPillowTest, cls).setUpClass()
    # Saved sample data source with its table built once for the whole class.
    cls.config = get_sample_data_source()
    cls.config.save()
    cls.adapter = get_indicator_adapter(cls.config)
    cls.adapter.build_table()
    # Fixed timestamp so time-sensitive assertions are deterministic.
    cls.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
    cls.pillow = _get_pillow([cls.config])
def setUpClass(cls):
    super(DataSourceConfigurationPartitionTest, cls).setUpClass()
    # Sample data source with a single SQL partition defined by the
    # class-level column/subtype/constraint attributes.
    cls.data_source = get_sample_data_source()
    cls.data_source.sql_settings.partition_config = [
        SQLPartition(column=cls.column, subtype=cls.subtype, constraint=cls.constraint)
    ]
    cls.data_source.save()
    cls.adapter = get_indicator_adapter(cls.data_source)
    cls.adapter.build_table()
def setUpClass(cls):
    super(DataSourceConfigurationPartitionTest, cls).setUpClass()
    # Sample data source with a single SQL partition defined by the
    # class-level column/subtype/constraint attributes.
    cls.data_source = get_sample_data_source()
    cls.data_source.sql_settings.partition_config = [
        SQLPartition(column=cls.column, subtype=cls.subtype, constraint=cls.constraint)
    ]
    cls.data_source.save()
    cls.adapter = get_indicator_adapter(cls.data_source)
    cls.adapter.build_table()
def _create_new_report(self):
    """Create and save a data source + report pair in self.domain.

    Stores both on self and returns the saved report.
    """
    self.data_source = get_sample_data_source()
    self.data_source.domain = self.domain
    self.data_source.save()
    report = get_sample_report_config()
    report.config_id = self.data_source.get_id
    report.domain = self.domain
    report.save()
    self.report = report
    return self.report
def test_table_adapters(self):
    """Bootstrap should build one adapter per data source, keyed by domain."""
    data_source = get_sample_data_source()
    domain = data_source.domain
    provider = MockDataSourceProvider({domain: [data_source]})
    manager = ConfigurableReportTableManagerMixin([provider])
    manager.bootstrap()
    adapters_by_domain = manager.table_adapters_by_domain
    self.assertEqual(1, len(adapters_by_domain))
    self.assertEqual(1, len(adapters_by_domain[domain]))
    self.assertEqual(data_source, adapters_by_domain[domain][0].config)
def test_merge_table_adapters(self):
    """_add_data_sources_to_table_adapters should merge new data sources into
    the per-domain adapter map without disturbing existing entries."""
    data_source_1 = get_sample_data_source()
    data_source_1.save()
    ds_1_domain = data_source_1.domain
    table_manager = ConfigurableReportTableManagerMixin(
        [MockDataSourceProvider({ds_1_domain: [data_source_1]})])
    table_manager.bootstrap()
    # test in same domain
    data_source_2 = self._copy_data_source(data_source_1)
    data_source_2.save()
    table_manager._add_data_sources_to_table_adapters([data_source_2])
    self.assertEqual(1, len(table_manager.table_adapters_by_domain))
    self.assertEqual(
        2, len(table_manager.table_adapters_by_domain[ds_1_domain]))
    self.assertEqual(
        {data_source_1, data_source_2},
        # set comprehension instead of set([...]) (flake8-comprehensions C403)
        {
            table_adapter.config
            for table_adapter in table_manager.table_adapters_by_domain[ds_1_domain]
        })
    # test in a new domain
    data_source_3 = self._copy_data_source(data_source_1)
    ds3_domain = 'new_domain'
    data_source_3.domain = ds3_domain
    data_source_3.save()
    table_manager._add_data_sources_to_table_adapters([data_source_3])
    # should now be 2 domains in the map
    self.assertEqual(2, len(table_manager.table_adapters_by_domain))
    # ensure domain 1 unchanged
    self.assertEqual(
        {data_source_1, data_source_2},
        {
            table_adapter.config
            for table_adapter in table_manager.table_adapters_by_domain[ds_1_domain]
        })
    self.assertEqual(
        1, len(table_manager.table_adapters_by_domain[ds3_domain]))
    self.assertEqual(
        data_source_3,
        table_manager.table_adapters_by_domain[ds3_domain][0].config)
    # finally pass in existing data sources and ensure they modify in place
    table_manager._add_data_sources_to_table_adapters(
        [data_source_1, data_source_3])
    self.assertEqual(2, len(table_manager.table_adapters_by_domain))
    self.assertEqual(
        {data_source_1, data_source_2},
        {
            table_adapter.config
            for table_adapter in table_manager.table_adapters_by_domain[ds_1_domain]
        })
    self.assertEqual(
        data_source_3,
        table_manager.table_adapters_by_domain[ds3_domain][0].config)
def setUp(self):
    super().setUp()
    # Data source and report saved in self.domain; the report references the
    # data source's id, so the save order matters.
    self.data_source = get_sample_data_source()
    self.data_source.domain = self.domain
    self.data_source.save()
    self.report = get_sample_report_config()
    self.report.config_id = self.data_source.get_id
    self.report.domain = self.domain
    self.report.save()
def test_get_filtered_configs_es_error(self):
    """If the ES domain lookup errors, no configs should be filtered out."""
    manager = ConfigurableReportTableManagerMixin(
        MockDataSourceProvider(), filter_missing_domains=True)
    ds1 = get_sample_data_source()
    ds1.domain = 'domain1'
    ds2 = DataSourceConfiguration.wrap(ds1.to_json())
    ds2.domain = 'domain2'
    # simulate Elasticsearch being unavailable
    with patch('corehq.apps.es.es_query.run_query') as run_query:
        run_query.side_effect = ESError
        result = manager.get_filtered_configs([ds1, ds2])
    self.assertEqual(result, [ds1, ds2])
def _create_report(self, master_id=None):
    """Create a saved ReportConfiguration (and backing data source) in self.domain.

    Both documents are deleted via addCleanup when the test finishes.
    """
    ds = get_sample_data_source()
    ds.domain = self.domain
    ds.save()
    self.addCleanup(ds.delete)
    report = ReportConfiguration()
    report.config_id = ds.get_id
    report.domain = self.domain
    report.report_meta = ReportMeta()
    report.report_meta.master_id = master_id
    report.save()
    self.addCleanup(report.delete)
    return report
def setUpClass(cls):
    # Second database shares connection parameters with the reporting DB,
    # differing only in database name.
    cls.db2_name = 'cchq_ucr_tests'
    db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
    db_conn_parts[-1] = cls.db2_name
    cls.db2_url = '/'.join(db_conn_parts)

    # setup patches
    cls.engine_id_patches = (
        # unfortunately we need to patch this directly in modules that import it as well
        patch('corehq.apps.userreports.sql.connection.get_engine_id'),
        patch('corehq.apps.userreports.sql.adapter.get_engine_id'),
        patch('corehq.apps.userreports.reports.data_source.get_engine_id'),
    )
    cls.connection_string_patch = patch(
        'corehq.db.connection_manager.get_connection_string')
    for engine_id_patch in cls.engine_id_patches:
        mock_engine_id_method = engine_id_patch.start()
        # resolve the engine id straight off the config object
        mock_engine_id_method.side_effect = lambda x: x.engine_id

    def connection_string_for_engine(engine_id):
        # engine-1 -> reporting DB, anything else -> scratch DB
        if engine_id == 'engine-1':
            return settings.SQL_REPORTING_DATABASE_URL
        else:
            return cls.db2_url
    mock_manager = cls.connection_string_patch.start()
    mock_manager.side_effect = connection_string_for_engine

    # setup data sources
    data_source_template = get_sample_data_source()
    cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
    cls.ds_1.engine_id = 'engine-1'
    cls.ds_1.save()
    cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
    cls.ds_2.engine_id = 'engine-2'
    cls.ds_2.save()

    # use db1 engine to create db2 http://stackoverflow.com/a/8977109/8207
    cls.root_engine = create_engine(settings.SQL_REPORTING_DATABASE_URL)
    conn = cls.root_engine.connect()
    conn.execute('commit')
    try:
        conn.execute('CREATE DATABASE {}'.format(cls.db2_name))
    except ProgrammingError:
        # optimistically assume it failed because was already created.
        pass
    conn.close()
    cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
    cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
def setUpClass(cls):
    # Second database shares connection parameters with the reporting DB,
    # differing only in database name.
    cls.db2_name = 'cchq_ucr_tests'
    db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
    db_conn_parts[-1] = cls.db2_name
    cls.db2_url = '/'.join(db_conn_parts)

    # setup patches
    cls.engine_id_patches = (
        # unfortunately we need to patch this directly in modules that import it as well
        patch('corehq.apps.userreports.sql.connection.get_engine_id'),
        patch('corehq.apps.userreports.sql.adapter.get_engine_id'),
        patch('corehq.apps.userreports.reports.data_source.get_engine_id'),
    )
    cls.connection_string_patch = patch('corehq.db.connection_manager.get_connection_string')
    for engine_id_patch in cls.engine_id_patches:
        mock_engine_id_method = engine_id_patch.start()
        # resolve the engine id straight off the config object
        mock_engine_id_method.side_effect = lambda x: x.engine_id

    def connection_string_for_engine(engine_id):
        # engine-1 -> reporting DB, anything else -> scratch DB
        if engine_id == 'engine-1':
            return settings.SQL_REPORTING_DATABASE_URL
        else:
            return cls.db2_url
    mock_manager = cls.connection_string_patch.start()
    mock_manager.side_effect = connection_string_for_engine

    # setup data sources
    data_source_template = get_sample_data_source()
    cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
    cls.ds_1.engine_id = 'engine-1'
    cls.ds_1.save()
    cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
    cls.ds_2.engine_id = 'engine-2'
    cls.ds_2.save()

    # use db1 engine to create db2 http://stackoverflow.com/a/8977109/8207
    cls.root_engine = create_engine(settings.SQL_REPORTING_DATABASE_URL)
    conn = cls.root_engine.connect()
    conn.execute('commit')
    try:
        conn.execute('CREATE DATABASE {}'.format(cls.db2_name))
    except ProgrammingError:
        # optimistically assume it failed because was already created.
        pass
    conn.close()
    cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
    cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
def test_dynamic_modified_date(self):
    """Provider returns only data sources modified after the given timestamp."""
    config = get_sample_data_source()
    # bracket the save with generous (+/- 1s) timestamps to avoid
    # clock-resolution flakiness
    # NOTE: datetime.utcnow() is deprecated in Python 3.12; kept here to
    # match the naive timestamps the provider compares against
    before_save = datetime.datetime.utcnow() - datetime.timedelta(seconds=1)
    config.save()
    self.addCleanup(config.delete)
    after_save = datetime.datetime.utcnow() + datetime.timedelta(seconds=1)
    provider = DynamicDataSourceProvider()
    modified = provider.get_data_sources_modified_since(before_save)
    self.assertEqual(1, len(modified))
    self.assertEqual(config._id, modified[0]._id)
    modified = provider.get_data_sources_modified_since(after_save)
    self.assertEqual(0, len(modified))
def setUpClass(cls):
    super(DataSourceConfigurationCitusDBTest, cls).setUpClass()
    # Sample data source hash-distributed on the 'owner' column.
    cls.data_source = get_sample_data_source()
    cls.data_source.engine_id = 'icds-ucr'
    for indicator in cls.data_source.configured_indicators:
        if indicator['column_id'] == 'owner':
            # the distribution column must be part of the primary key
            indicator['is_primary_key'] = True
    cls.data_source.sql_settings.citus_config = CitusConfig(
        distribution_type="hash", distribution_column="owner")
    cls.data_source.sql_settings.primary_key = ['owner', 'doc_id']
    cls.data_source.save()
    cls.adapter = get_indicator_adapter(cls.data_source)
    if not cls.adapter.session_helper.is_citus_db:
        raise SkipTest(
            "Test only applicable when using CitusDB: {}".format(
                cls.adapter.session_helper.engine))
    cls.adapter.build_table()
def setUp(self):
    # Sample data source with one document-level validation:
    # is_starred must be exactly "yes" or "no".
    self.config = get_sample_data_source()
    self.config.validations = [
        Validation.wrap({
            "name": "is_starred_valid",
            "error_message": "is_starred has unexpected value",
            "expression": {
                "type": "boolean_expression",
                "expression": {
                    "type": "property_name",
                    "property_name": "is_starred"
                },
                "operator": "in",
                "property_value": ["yes", "no"]
            }
        })
    ]
def _create_report_and_datasource(self):
    """Create a saved report + data source in self.domain, attach a report
    module to master1, and return the (report, data_source) pair."""
    data_source = get_sample_data_source()
    data_source.domain = self.domain
    data_source.save()
    report = get_sample_report_config()
    report.config_id = data_source.get_id
    report.domain = self.domain
    report.save()
    reports_module = self.master1.add_module(
        ReportModule.new_module('Reports', None))
    reports_module.report_configs = [
        ReportAppConfig(report_id=report.get_id, header={'en': 'CommBugz'}),
    ]
    return report, data_source
def setUp(self):
    # Sample data source with one document-level validation:
    # is_starred must be exactly "yes" or "no".
    self.config = get_sample_data_source()
    self.config.validations = [
        Validation.wrap({
            "name": "is_starred_valid",
            "error_message": "is_starred has unexpected value",
            "expression": {
                "type": "boolean_expression",
                "expression": {
                    "type": "property_name",
                    "property_name": "is_starred"
                },
                "operator": "in",
                "property_value": ["yes", "no"]
            }
        })
    ]
def setUpClass(cls) -> None:
    # Domains granted different subsets of restricted UCR expressions.
    AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
        'domain_nopermission', [])
    AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
        'domain_baseitem', ['base_item_expression'])
    AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
        'domain_related_doc', ['related_doc'])
    AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
        'domain_both', ['related_doc', 'base_item_expression'])
    # Build a config exercising both restricted features: a related_doc
    # indicator and a base_item_expression.
    cls.config = get_sample_data_source()
    cls.config = cls.config.to_json()
    cls.config['configured_indicators'].append({
        "type": "expression",
        "is_primary_key": False,
        "is_nullable": True,
        "datatype": "string",
        "expression": {
            "value_expression": {
                "datatype": None,
                "type": "property_name",
                "property_name": "name"
            },
            "type": "related_doc",
            "related_doc_type": "Location",
            "doc_id_expression": {
                "datatype": None,
                "type": "property_name",
                "property_name": "health_post_id"
            }
        },
        "column_id": "health_post_name"
    })
    cls.config['base_item_expression'] = {
        "datatype": None,
        "property_name": "actions",
        "type": "property_name"
    }
    cls.config = DataSourceConfiguration.wrap(cls.config)
    # NOTE(review): super().setUpClass() runs after the fixtures above —
    # confirm nothing here depends on base-class setup.
    return super().setUpClass()
def test_mirroring(self):
    """A write to a mirrored data source should land in every configured engine."""
    ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
    ds3.engine_id = "default"
    ds3.mirrored_engine_ids = ['engine-2']
    adapter = get_indicator_adapter(ds3)
    self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
    self.assertEqual(len(adapter.all_adapters), 2)

    def row_count(sub_adapter):
        # count rows in this engine's copy of the table
        with sub_adapter.session_context() as session:
            return session.query(sub_adapter.get_table()).count()

    for sub_adapter in adapter.all_adapters:
        self.assertEqual(0, row_count(sub_adapter))
    with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
        pillow = get_case_pillow(ucr_configs=[ds3])
    sample_doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(sample_doc))
    # the single processed change should appear in both databases
    for sub_adapter in adapter.all_adapters:
        self.assertEqual(1, row_count(sub_adapter))
def setUpClass(cls):
    # Sample data source hash-distributed on the 'owner' column.
    cls.data_source = get_sample_data_source()
    cls.data_source.engine_id = ICDS_UCR_CITUS_ENGINE_ID
    for indicator in cls.data_source.configured_indicators:
        if indicator['column_id'] == 'owner':
            # the distribution column must be part of the primary key
            indicator['is_primary_key'] = True
    cls.data_source.sql_settings.citus_config = CitusConfig(
        distribution_type="hash", distribution_column="owner")
    cls.data_source.sql_settings.primary_key = ['owner', 'doc_id']
    cls.adapter = get_indicator_adapter(cls.data_source)
    if not cls.adapter.session_helper.is_citus_db:
        raise SkipTest(
            "Test only applicable when using CitusDB: {}".format(
                cls.adapter.session_helper.engine))
    # SkipTest must come before this setup so that the database is only setup if the test is not skipped
    super(DataSourceConfigurationCitusDBTest, cls).setUpClass()
    cls.data_source.save()
    cls.adapter.build_table()
def test_mirroring(self):
    """A write to a mirrored data source should land in every configured engine."""
    ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
    ds3.engine_id = DEFAULT_ENGINE_ID
    ds3.mirrored_engine_ids = ['engine-2']
    adapter = get_indicator_adapter(ds3)
    self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
    self.assertEqual(len(adapter.all_adapters), 2)

    def row_count(sub_adapter):
        # count rows in this engine's copy of the table
        with sub_adapter.session_context() as session:
            return session.query(sub_adapter.get_table()).count()

    for sub_adapter in adapter.all_adapters:
        self.assertEqual(0, row_count(sub_adapter))
    with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
        pillow = get_case_pillow(ucr_configs=[ds3])
    sample_doc, _ = get_sample_doc_and_indicators()
    pillow.process_change(doc_to_change(sample_doc))
    # the single processed change should appear in both databases
    for sub_adapter in adapter.all_adapters:
        self.assertEqual(1, row_count(sub_adapter))
def test_bad_spec_error(self, _):
    # A data source whose spec fails to wrap should be skipped at bootstrap.
    # Temporarily register the expression so the config can be saved, then
    # unregister it so bootstrap sees an unknown expression type.
    ExpressionFactory.register("missing_expression", lambda x, y: x)
    data_source_1 = get_sample_data_source()
    data_source_1.configured_indicators[0] = {
        "column_id": "date",
        "type": "expression",
        "expression": {
            "type": "missing_expression",
        },
        "datatype": "datetime"
    }
    data_source_1.save()
    # NOTE(review): data_source_1 is never deleted here — sibling tests use
    # self.addCleanup(config.delete); confirm teardown elsewhere covers this.
    del ExpressionFactory.spec_map["missing_expression"]
    ds_1_domain = data_source_1.domain
    table_manager = ConfigurableReportTableManagerMixin(
        [DynamicDataSourceProvider()])
    table_manager.bootstrap()
    self.assertEqual(0, len(table_manager.table_adapters_by_domain))
    self.assertEqual(
        0, len(table_manager.table_adapters_by_domain[ds_1_domain]))
def test_get_filtered_configs(self):
    """Configs whose domain is missing from ES should be filtered out."""
    manager = ConfigurableReportTableManagerMixin(
        MockDataSourceProvider(), filter_missing_domains=True)
    ds1 = get_sample_data_source()
    ds1.domain = 'domain1'
    ds2 = DataSourceConfiguration.wrap(ds1.to_json())
    ds2.domain = 'domain2'
    # ES only knows about domain1, so ds2 should be dropped
    es_response = {
        'hits': {
            'hits': [{
                '_id': 'd1',
                '_source': {
                    'name': 'domain1'
                }
            }]
        }
    }
    with patch('corehq.apps.es.es_query.run_query') as run_query:
        run_query.return_value = es_response
        result = manager.get_filtered_configs([ds1, ds2])
    self.assertEqual(result, [ds1])
def setUpClass(cls):
    super(UCRMultiDBTest, cls).setUpClass()
    # Second database shares connection parameters with 'default',
    # differing only in database name.
    cls.db2_name = 'cchq_ucr_tests'
    db_conn_parts = connections.connection_manager.get_connection_string('default').split('/')
    db_conn_parts[-1] = cls.db2_name
    cls.db2_url = '/'.join(db_conn_parts)
    # entered here; presumably exited in tearDownClass — confirm
    cls.context_manager = connections.override_engine('engine-2', cls.db2_url, cls.db2_name)
    cls.context_manager.__enter__()

    # setup data sources
    data_source_template = get_sample_data_source()
    cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
    cls.ds_1.engine_id = 'engine-1'
    cls.ds_1.save()
    cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
    cls.ds_2.engine_id = 'engine-2'
    cls.ds_2.save()

    # scratch database for engine-2; exited during class teardown
    cls.db_context = temporary_database(cls.db2_name)
    cls.db_context.__enter__()

    cls.ds1_adapter = get_indicator_adapter(cls.ds_1)
    cls.ds2_adapter = get_indicator_adapter(cls.ds_2)
def setUp(self):
    # Fresh, unsaved sample data source for each test.
    self.config = get_sample_data_source()
def setUpClass(cls):
    # Unsaved sample data source plus the resume helper under test.
    cls._data_source = get_sample_data_source()
    cls._resume_helper = DataSourceResumeHelper(cls._data_source)
def setUp(self):
    # Fresh, unsaved sample data source for each test.
    self.config = get_sample_data_source()
def setUp(self):
    # Saved sample data source and a SQL adapter over it.
    self.config = get_sample_data_source()
    self.config.save()
    self.adapter = IndicatorSqlAdapter(self.config)
    # Fixed timestamp so time-sensitive assertions are deterministic.
    self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
def _get_config(self, extra_id):
    """Return an unsaved sample data source whose table_id is suffixed with extra_id."""
    config = get_sample_data_source()
    config.table_id += extra_id
    return config
def setUpClass(cls):
    super(DataSourceResumeBuildTest, cls).setUpClass()
    # Unsaved sample data source plus the resume helper under test.
    cls._data_source = get_sample_data_source()
    cls._resume_helper = DataSourceResumeHelper(cls._data_source)
def teardown(self):
    # NOTE(review): lowercase `teardown` is not invoked by unittest.TestCase
    # (which calls `tearDown`) — confirm this runs under a runner that
    # supports nose/pytest-style hooks, otherwise the table is never dropped.
    # we need to get the config multiple times in the test for it to properly
    # recalculate the schema, so we can't have a class wide config variable
    config = get_sample_data_source()
    adapter = get_indicator_adapter(config)
    adapter.drop_table()
def setUp(self):
    super(IndicatorPillowTestBase, self).setUp()
    # Saved sample data source and its adapter, shared by the pillow tests.
    self.config = get_sample_data_source()
    self.config.save()
    self.adapter = get_indicator_adapter(self.config)
    # Fixed timestamp so time-sensitive assertions are deterministic.
    self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)
def _get_config(self, extra_id):
    """Return an unsaved sample data source whose table_id is suffixed with extra_id."""
    config = get_sample_data_source()
    config.table_id += extra_id
    return config
def setUp(self):
    # Unsaved sample data source; the pillow is bootstrapped with it directly.
    self.config = get_sample_data_source()
    self.pillow = ConfigurableIndicatorPillow()
    self.pillow.bootstrap(configs=[self.config])
    self.adapter = IndicatorSqlAdapter(self.config)
    # Fixed timestamp so time-sensitive assertions are deterministic.
    self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)