def tearDownModule():
    # Nothing to tear down on a partitioned database: the matching
    # setUpModule skipped all of its work in that configuration.
    if settings.USE_PARTITIONED_DATABASE:
        return

    # Patch out the call center data source provider so that dropping the
    # UCR tables does not trigger call-center configuration lookups.
    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    with override_settings(SERVER_ENVIRONMENT='icds'):
        configs = StaticDataSourceConfiguration.by_domain('icds-cas')
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            if adapter.config.table_id == 'static-child_health_cases':
                # hack because this is in a migration
                adapter.clear_table()
                continue
            adapter.drop_table()
        cleanup_misc_agg_tables()
    # Remove the location fixtures and the test domain created in setup.
    LocationType.objects.filter(domain='icds-cas').delete()
    SQLLocation.objects.filter(domain='icds-cas').delete()
    Domain.get_by_name('icds-cas').delete()
    _call_center_domain_mock.stop()
    _stop_transaction_exemption()
def setUpModule():
    """Create the champ-cameroon domain, build its static UCR tables and
    bulk-load the CSV fixtures into them."""
    if isinstance(Domain.get_db(), Mock):
        # needed to skip setUp for javascript tests thread on Travis
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    domain = create_domain('champ-cameroon')
    with override_settings(SERVER_ENVIRONMENT='production'):
        configs = StaticDataSourceConfiguration.by_domain(domain.name)
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            adapter.build_table()

        engine = connection_manager.get_engine(UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                table_name = get_table_name(domain.name, file_name[:-4])
                table = metadata.tables[table_name]
                # Python 2 is no longer supported: the former
                # `'csv' if six.PY3 else b'csv'` branches collapse to str.
                postgres_copy.copy_from(
                    f, table, engine, format='csv', null='', header=True
                )
    _call_center_domain_mock.stop()
def get_datasources_for_domain(domain, referenced_doc_type=None, include_static=False,
                               include_aggregate=False):
    """Return every data source configuration defined for ``domain``.

    Dynamic configurations always come first (sorted by display name);
    static and aggregate data sources are appended when requested.
    """
    from corehq.apps.userreports.models import DataSourceConfiguration, StaticDataSourceConfiguration

    key = [domain]
    if referenced_doc_type:
        key.append(referenced_doc_type)

    dynamic = DataSourceConfiguration.view(
        'userreports/data_sources_by_build_info',
        startkey=key,
        endkey=key + [{}],
        reduce=False,
        include_docs=True,
    )
    datasources = sorted(dynamic, key=lambda config: config.display_name or '')

    if include_static:
        static_ds = StaticDataSourceConfiguration.by_domain(domain)
        if referenced_doc_type:
            static_ds = [
                ds for ds in static_ds
                if ds.referenced_doc_type == referenced_doc_type
            ]
        datasources.extend(sorted(static_ds, key=lambda config: config.display_name))

    if include_aggregate:
        from corehq.apps.aggregate_ucrs.models import AggregateTableDefinition
        datasources.extend(AggregateTableDefinition.objects.filter(domain=domain).all())
    return datasources
def setUpModule():
    """Create the champ-cameroon domain, build its UCR tables, and load fixtures.

    On any failure the partially-created state is cleaned up by calling
    tearDownModule() before re-raising.
    """
    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    domain = create_domain('champ-cameroon')
    try:
        configs = StaticDataSourceConfiguration.by_domain(domain.name)
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            adapter.build_table()

        engine = connection_manager.get_engine(UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                table_name = get_table_name(domain.name, file_name[:-4])
                table = metadata.tables[table_name]
                postgres_copy.copy_from(f, table, engine, format='csv', null='', header=True)
    except Exception:
        tearDownModule()
        raise
    finally:
        # Bug fix: the patcher was previously only stopped on the success
        # path, leaking an active patch whenever setup failed.
        _call_center_domain_mock.stop()
def tearDownModule():
    # The partitioned configuration skips setup, so there is nothing to
    # tear down.
    if settings.USE_PARTITIONED_DATABASE:
        return

    # Patch out the call center provider while dropping the UCR tables.
    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    with override_settings(SERVER_ENVIRONMENT='icds'):
        configs = StaticDataSourceConfiguration.by_domain('icds-cas')
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            if adapter.config.table_id == 'static-child_health_cases':
                # hack because this is in a migration
                adapter.clear_table()
                continue
            adapter.drop_table()
        # Clear the table-name mapping rows inserted during setup.
        engine = connection_manager.get_engine(ICDS_UCR_ENGINE_ID)
        with engine.begin() as connection:
            metadata = sqlalchemy.MetaData(bind=engine)
            metadata.reflect(bind=engine, extend_existing=True)
            table = metadata.tables['ucr_table_name_mapping']
            delete = table.delete()
            connection.execute(delete)
    # Remove the test locations and domain.
    LocationType.objects.filter(domain='icds-cas').delete()
    SQLLocation.objects.filter(domain='icds-cas').delete()
    Domain.get_by_name('icds-cas').delete()
    _call_center_domain_mock.stop()
def setup_tables_and_fixtures(domain_name):
    # Rebuild every static UCR table for the domain from scratch, then load
    # CSV fixtures and distribute the tables for citus.
    configs = StaticDataSourceConfiguration.by_domain(domain_name)
    adapters = [get_indicator_adapter(config) for config in configs]

    for adapter in adapters:
        try:
            adapter.drop_table()
        except Exception:
            # the table may not exist yet; safe to ignore and rebuild
            pass
        adapter.build_table()

    cleanup_misc_agg_tables()

    engine = connection_manager.get_engine(ICDS_UCR_CITUS_ENGINE_ID)
    metadata = sqlalchemy.MetaData(bind=engine)
    metadata.reflect(bind=engine, extend_existing=True)
    path = os.path.join(os.path.dirname(__file__), 'fixtures')
    for file_name in os.listdir(path):
        with open(os.path.join(path, file_name), encoding='utf-8') as f:
            table_name = FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]
            table = metadata.tables[table_name]
            # NOTE(review): icds_dashboard_* tables appear to be skipped
            # here (no fixture load) — confirm the intended nesting of the
            # copy_from call relative to this guard.
            if not table_name.startswith('icds_dashboard_'):
                columns = [
                    '"{}"'.format(c.strip())  # quote to preserve case
                    for c in f.readline().split(',')
                ]
                postgres_copy.copy_from(
                    f, table, engine, format='csv', null='', columns=columns)

    _distribute_tables_for_citus(engine)
def tearDownModule():
    """Drop the icds-cas UCR tables, clear the name-mapping table, and
    delete the test locations and domain."""
    if settings.USE_PARTITIONED_DATABASE:
        return

    call_center_patcher = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    call_center_patcher.start()
    with override_settings(SERVER_ENVIRONMENT='icds'):
        for config in StaticDataSourceConfiguration.by_domain('icds-cas'):
            adapter = get_indicator_adapter(config)
            if adapter.config.table_id == 'static-child_health_cases':
                # hack because this is in a migration
                adapter.clear_table()
            else:
                adapter.drop_table()
        engine = connection_manager.get_engine(ICDS_UCR_ENGINE_ID)
        with engine.begin() as connection:
            metadata = sqlalchemy.MetaData(bind=engine)
            metadata.reflect(bind=engine, extend_existing=True)
            mapping_table = metadata.tables['ucr_table_name_mapping']
            connection.execute(mapping_table.delete())
    LocationType.objects.filter(domain='icds-cas').delete()
    SQLLocation.objects.filter(domain='icds-cas').delete()
    Domain.get_by_name('icds-cas').delete()
    call_center_patcher.stop()
def get_datasources_for_domain(domain, referenced_doc_type=None, include_static=False,
                               include_aggregate=False):
    """Collect data source configurations for ``domain``.

    Returns dynamic configurations (sorted by display name) first, followed
    by static ones when ``include_static`` is set, and aggregate table
    definitions when ``include_aggregate`` is set.
    """
    from corehq.apps.userreports.models import DataSourceConfiguration, StaticDataSourceConfiguration

    key = [domain] if not referenced_doc_type else [domain, referenced_doc_type]
    view_results = DataSourceConfiguration.view(
        'userreports/data_sources_by_build_info',
        startkey=key,
        endkey=key + [{}],
        reduce=False,
        include_docs=True,
    )

    def _display_name(config):
        return config.display_name or ''

    datasources = sorted(view_results, key=_display_name)

    if include_static:
        static_ds = StaticDataSourceConfiguration.by_domain(domain)
        if referenced_doc_type:
            static_ds = [ds for ds in static_ds
                         if ds.referenced_doc_type == referenced_doc_type]
        datasources.extend(sorted(static_ds, key=lambda config: config.display_name))

    if include_aggregate:
        from corehq.apps.aggregate_ucrs.models import AggregateTableDefinition
        datasources.extend(AggregateTableDefinition.objects.filter(domain=domain).all())
    return datasources
def _ucr(self):
    """Print count and on-disk size statistics for every UCR data source
    (static and dynamic) in the domain."""
    static_datasources = StaticDataSourceConfiguration.by_domain(self.domain)
    dynamic_datasources = DataSourceConfiguration.by_domain(self.domain)
    self._print_value('Static UCR data sources', len(static_datasources))
    self._print_value('Dynamic UCR data sources', len(dynamic_datasources))

    def _get_count(config):
        # Approximate row count from the query planner (EXPLAIN) rather
        # than a full COUNT(*) scan.
        table_name = get_table_name(config.domain, config.table_id)
        db_name = connection_manager.get_django_db_alias(config.engine_id)
        return _get_count_from_explain_raw(
            db_name, 'SELECT * FROM "%s"' % table_name, [])

    def _get_table_size(config):
        # Total relation size (table + indexes + TOAST) in bytes.
        table_name = get_table_name(config.domain, config.table_id)
        db_name = connection_manager.get_django_db_alias(config.engine_id)
        db_cursor = connections[db_name].cursor()
        with db_cursor as cursor:
            cursor.execute(
                "SELECT pg_total_relation_size('\"%s\"')" % table_name, [])
            size_bytes = cursor.fetchone()[0]  # renamed: don't shadow builtin `bytes`
            return size_bytes

    rows = sorted(
        [(datasource.display_name, _get_count(datasource),
          datasource.referenced_doc_type, _get_table_size(datasource))
         for datasource in static_datasources + dynamic_datasources],
        key=lambda r: r[-1])
    # Bug fix: the header row previously listed five columns ('Size' and
    # 'Size (bytes)') while each data row only carries four values.
    self._print_table([
        'Datasource name', 'Row count (approximate)', 'Doc type',
        'Size (bytes)'
    ], rows)
def _setup_ucr_tables():
    """Rebuild the AAA static UCR tables and bulk-load the CSV fixtures."""
    with mock.patch('corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'):
        with override_settings(SERVER_ENVIRONMENT=TEST_ENVIRONMENT):
            configs = StaticDataSourceConfiguration.by_domain(TEST_DOMAIN)
            adapters = [get_indicator_adapter(config) for config in configs]
            for adapter in adapters:
                try:
                    adapter.drop_table()
                except Exception:
                    # the table may not exist yet; rebuild regardless
                    pass
                adapter.build_table()

            engine = connection_manager.get_engine('aaa-data')
            metadata = sqlalchemy.MetaData(bind=engine)
            metadata.reflect(bind=engine, extend_existing=True)
            for file_name in os.listdir(INPUT_PATH):
                with open(os.path.join(INPUT_PATH, file_name), encoding='utf-8') as f:
                    table_name = FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]
                    table = metadata.tables[table_name]
                    columns = [
                        '"{}"'.format(c.strip())  # quote to preserve case
                        for c in f.readline().split(',')
                    ]
                    # Python 2 support is gone: the `six.PY3` conditionals
                    # for bytes-vs-str copy options collapse to plain str.
                    postgres_copy.copy_from(
                        f, table, engine, format='csv', null='',
                        columns=columns
                    )
def _setup_ucr_tables():
    """Drop-and-rebuild the AAA UCR tables, then load all CSV fixtures."""
    patch_target = (
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    with mock.patch(patch_target):
        with override_settings(SERVER_ENVIRONMENT=TEST_ENVIRONMENT):
            for config in StaticDataSourceConfiguration.by_domain(TEST_DOMAIN):
                adapter = get_indicator_adapter(config)
                try:
                    adapter.drop_table()
                except Exception:
                    # the table may not exist yet; rebuild it regardless
                    pass
                adapter.build_table()

            engine = connection_manager.get_engine('aaa-data')
            metadata = sqlalchemy.MetaData(bind=engine)
            metadata.reflect(bind=engine, extend_existing=True)
            for file_name in os.listdir(INPUT_PATH):
                with open(os.path.join(INPUT_PATH, file_name), encoding='utf-8') as f:
                    table = metadata.tables[FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]]
                    # quote the header names to preserve case
                    columns = ['"{}"'.format(c.strip()) for c in f.readline().split(',')]
                    postgres_copy.copy_from(
                        f, table, engine, format='csv', null='', columns=columns)
def handle(self, domain, **options):
    """Queue a rebuild of every static and dynamic UCR table in ``domain``."""
    configs = StaticDataSourceConfiguration.by_domain(domain)
    configs.extend(DataSourceConfiguration.by_domain(domain))
    print("Rebuilding {} tables".format(len(configs)))
    for config in configs:
        tasks.rebuild_indicators(config._id)
def _shared_context(domain):
    """Template context shared by the UCR configuration views for ``domain``."""
    static_reports = list(StaticReportConfiguration.by_domain(domain))
    static_data_sources = list(StaticDataSourceConfiguration.by_domain(domain))
    reports = ReportConfiguration.by_domain(domain) + static_reports
    data_sources = DataSourceConfiguration.by_domain(domain) + static_data_sources
    return {
        'domain': domain,
        'reports': reports,
        'data_sources': data_sources,
    }
def handle(self, domain, **options):
    """Trigger an indicator rebuild for each UCR data source in ``domain``."""
    data_sources = list(StaticDataSourceConfiguration.by_domain(domain))
    data_sources.extend(DataSourceConfiguration.by_domain(domain))
    print("Rebuilding {} tables".format(len(data_sources)))
    for data_source in data_sources:
        tasks.rebuild_indicators(data_source._id)
def __init__(self, domain, *args, **kwargs):
    """Choice field listing every UCR data source available in ``domain``."""
    self.domain = domain
    sources = DataSourceConfiguration.by_domain(self.domain) + list(
        StaticDataSourceConfiguration.by_domain(domain))
    choices = [(source._id, source.display_name) for source in sources]
    super(ReportDataSourceField, self).__init__(choices=choices, *args, **kwargs)
def main_context(self):
    """Add this domain's report and data source configurations to the
    base template context."""
    reports = list(StaticReportConfiguration.by_domain(self.domain))
    data_sources = list(StaticDataSourceConfiguration.by_domain(self.domain))
    context = super(BaseUserConfigReportsView, self).main_context
    context.update({
        'reports': ReportConfiguration.by_domain(self.domain) + reports,
        'data_sources': DataSourceConfiguration.by_domain(self.domain) + data_sources,
    })
    return context
def __init__(self, domain, *args, **kwargs):
    """Populate choices with the (id, display name) pair of every UCR data
    source defined in ``domain``."""
    self.domain = domain
    available = list(DataSourceConfiguration.by_domain(self.domain))
    available.extend(StaticDataSourceConfiguration.by_domain(domain))
    super(ReportDataSourceField, self).__init__(
        choices=[(source._id, source.display_name) for source in available],
        *args, **kwargs
    )
def generate_dump_script(self, source_engine_id):
    # Emit pg_dump/load lines for every table on the given source engine,
    # tracking what was covered so leftovers can be reported at the end.
    self.seen_tables = set()

    source_engine = connection_manager.get_engine(source_engine_id)
    # direct dump and load from parent + child tables
    with source_engine.begin() as source_conn:
        insp = sqlinspect(source_conn)
        for table in keep_child_tables + plain_tables:
            if table in self.all_tables:
                for line in self.get_table_date_target(insp, table):
                    self.insert_row(line)

        # direct dump and load from parent
        # dump from all child tables into parent table
        for table in drop_child_tables:
            if table in self.all_tables:
                for line in self.get_table_date_target(insp, table, all_in_parent=True):
                    self.insert_row(line)

        # UCR tables for the dashboard domain that live on (or mirror to)
        # this engine are dumped child-into-parent as well.
        for datasource in StaticDataSourceConfiguration.by_domain(DASHBOARD_DOMAIN):
            if source_engine_id == datasource.engine_id or source_engine_id in datasource.mirrored_engine_ids:
                adapter = get_indicator_adapter(datasource)
                table_name = adapter.get_table().name

                # direct dump and load from parent
                # dump from all child tables into parent table
                # - if table is distrubuted, citus will distribute the data
                # - if table is partitioned the triggers on the parent will distribute the data
                for line in self.get_table_date_target(insp, table_name, all_in_parent=True):
                    self.insert_row(line)

    remaining_tables = self.all_tables - self.seen_tables - IGNORE_TABLES
    icds_ucr_prefix = '{}{}_'.format(UCR_TABLE_PREFIX, DASHBOARD_DOMAIN)

    def keep_table(table):
        # Only report tables that are either non-UCR or belong to the
        # dashboard domain's own UCR namespace.
        root_table = self.child_parent_mapping.get(table, table)
        return not root_table.startswith(UCR_TABLE_PREFIX) or root_table.startswith(icds_ucr_prefix)

    remaining_tables = list(filter(keep_table, remaining_tables))

    if remaining_tables:
        self.stderr.write("Some tables not seen:")
        for t in remaining_tables:
            parent = self.child_parent_mapping.get(t)
            if parent:
                self.stderr.write("\t{} (parent: {})".format(t, parent))
            else:
                self.stderr.write("\t{}".format(t))
def _shared_context(domain):
    """Build the common template context for the UCR views in ``domain``."""
    static_reports = list(StaticReportConfiguration.by_domain(domain))
    static_data_sources = list(StaticDataSourceConfiguration.by_domain(domain))
    context = {'domain': domain}
    context['reports'] = ReportConfiguration.by_domain(domain) + static_reports
    context['data_sources'] = DataSourceConfiguration.by_domain(domain) + static_data_sources
    return context
def handle(self, engine_id, **options):
    """Run migrations against the given engine's database, then (re)build
    every dashboard UCR table that lives on or mirrors to that engine."""
    migrate_options = copy(options)
    migrate_options['database'] = connection_manager.get_django_db_alias(engine_id)
    call_command('migrate', **migrate_options)

    for datasource in StaticDataSourceConfiguration.by_domain(DASHBOARD_DOMAIN):
        on_this_engine = (engine_id == datasource.engine_id
                          or engine_id in datasource.mirrored_engine_ids)
        if on_this_engine:
            IndicatorSqlAdapter(datasource, engine_id=engine_id).build_table()
def __init__(self, domain, *args, **kwargs):
    """Choice field over the domain's UCR data sources, including aggregate
    UCRs when the AGGREGATE_UCRS toggle is enabled."""
    self.domain = domain
    available = DataSourceConfiguration.by_domain(self.domain) + list(
        StaticDataSourceConfiguration.by_domain(domain))
    if toggles.AGGREGATE_UCRS.enabled(domain):
        from corehq.apps.aggregate_ucrs.models import AggregateTableDefinition
        available += AggregateTableDefinition.objects.filter(domain=self.domain)
    super(ReportDataSourceField, self).__init__(
        choices=[(source.data_source_id, source.display_name) for source in available],
        *args, **kwargs
    )
def handle(self, *args, **options):
    """Rebuild every UCR table for the domain given as the first positional
    argument (legacy optparse-style command)."""
    if not args:
        raise CommandError('Usage is rebuild_tables_by_domain %s' % self.args)

    domain = args[0]
    data_sources = list(StaticDataSourceConfiguration.by_domain(domain))
    data_sources.extend(DataSourceConfiguration.by_domain(domain))
    print("Rebuilding {} tables".format(len(data_sources)))
    for data_source in data_sources:
        tasks.rebuild_indicators(data_source._id)
def main_context(self):
    """Inject this domain's report and data source lists into the context."""
    static_reports = list(StaticReportConfiguration.by_domain(self.domain))
    static_data_sources = list(StaticDataSourceConfiguration.by_domain(self.domain))
    context = super(BaseUserConfigReportsView, self).main_context
    context['reports'] = ReportConfiguration.by_domain(self.domain) + static_reports
    context['data_sources'] = (
        DataSourceConfiguration.by_domain(self.domain) + static_data_sources)
    return context
def tearDownModule():
    """Drop the champ-cameroon UCR tables and delete the test domain."""
    patcher = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    patcher.start()
    for config in StaticDataSourceConfiguration.by_domain('champ-cameroon'):
        get_indicator_adapter(config).drop_table()
    Domain.get_by_name('champ-cameroon').delete()
    patcher.stop()
def setUpModule():
    """Create the icds-cas test domain with a block location, build the
    static UCR tables, load CSV fixtures, and run one aggregation pass."""
    if settings.USE_PARTITIONED_DATABASE:
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    domain = create_domain('icds-cas')
    location_type = LocationType.objects.create(
        domain=domain.name,
        name='block',
    )
    SQLLocation.objects.create(
        domain=domain.name,
        name='b1',
        location_id='b1',
        location_type=location_type
    )
    with override_settings(SERVER_ENVIRONMENT='icds'):
        configs = StaticDataSourceConfiguration.by_domain('icds-cas')
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            if adapter.config.table_id == 'static-child_health_cases':
                # hack because this is in a migration
                continue
            adapter.build_table()

        engine = connection_manager.get_session_helper(settings.ICDS_UCR_TEST_DATABASE_ALIAS).engine
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            # Bug fix: open the fixture with an explicit UTF-8 encoding,
            # consistent with every other fixture loader in this file.
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                table_name = FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]
                table = metadata.tables[table_name]
                postgres_copy.copy_from(f, table, engine, format='csv', null='', header=True)

        try:
            move_ucr_data_into_aggregation_tables(datetime(2017, 5, 28), intervals=2)
        except AssertionError:
            # partial aggregation failures are tolerated during test setup
            pass
    _call_center_domain_mock.stop()
def tearDownModule():
    """Tear down the champ-cameroon UCR tables created in setUpModule."""
    if isinstance(Domain.get_db(), Mock):
        # needed to skip setUp for javascript tests thread on Travis
        return

    patcher = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    patcher.start()
    adapters = [
        get_indicator_adapter(config)
        for config in StaticDataSourceConfiguration.by_domain('champ-cameroon')
    ]
    for adapter in adapters:
        adapter.drop_table()
    Domain.get_by_name('champ-cameroon').delete()
    patcher.stop()
def copy_ucr_datasources(self):
    """Copy dynamic UCR datasources into the new domain and return a dict
    mapping old datasource ids to new ones.

    Static datasources are code-defined and are not copied; we only verify
    the corresponding id exists in the new domain and record the mapping.
    """
    datasource_map = {}
    for datasource in get_datasources_for_domain(self.existing_domain):
        # reset build metadata so the copy is rebuilt in the new domain
        datasource.meta.build.finished = False
        datasource.meta.build.initiated = None
        old_id, new_id = self.save_couch_copy(datasource, self.new_domain)
        datasource_map[old_id] = new_id

    prefix = StaticDataSourceConfiguration._datasource_id_prefix + self.existing_domain + "-"
    for static_datasource in StaticDataSourceConfiguration.by_domain(self.existing_domain):
        table_id = static_datasource.get_id.replace(prefix, "")
        new_id = StaticDataSourceConfiguration.get_doc_id(self.new_domain, table_id)
        # check that new datasource is in new domain's list of static datasources
        StaticDataSourceConfiguration.by_id(new_id)
        datasource_map[static_datasource.get_id] = new_id
    return datasource_map
def tearDownModule():
    """Drop each champ-cameroon UCR table, then delete the domain."""
    if isinstance(Domain.get_db(), Mock):
        # needed to skip setUp for javascript tests thread on Travis
        return

    target = 'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    with mock.patch(target):
        for config in StaticDataSourceConfiguration.by_domain('champ-cameroon'):
            get_indicator_adapter(config).drop_table()
        Domain.get_by_name('champ-cameroon').delete()
def setUpClass(cls):
    """Rebuild the icds-cas UCR tables, load class test data, and run the
    aggregation pipeline once for the whole test class."""
    super(AggregationScriptTest, cls).setUpClass()
    call_center_patcher = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    call_center_patcher.start()
    cls.adapters = [
        get_indicator_adapter(config)
        for config in StaticDataSourceConfiguration.by_domain('icds-cas')
    ]
    for adapter in cls.adapters:
        if adapter.config.table_id == 'static-child_health_cases':
            # hack because this is in a migration
            continue
        adapter.rebuild_table()
    cls.setUpTestData()
    move_ucr_data_into_aggregation_tables(datetime(2017, 5, 28), intervals=2)
    call_center_patcher.stop()
def copy_ucr_datasources(self):
    """Copy this domain's UCR datasources into the new domain.

    Returns a dict mapping old datasource ids to their new-domain ids,
    covering both dynamic (copied) and static (code-defined) datasources.
    """
    datasource_map = {}
    dynamic_datasources = get_datasources_for_domain(self.existing_domain)
    for datasource in dynamic_datasources:
        # clear build progress so indicators get rebuilt for the copy
        datasource.meta.build.finished = False
        datasource.meta.build.initiated = None
        old_id, new_id = self.save_couch_copy(datasource, self.new_domain)
        datasource_map[old_id] = new_id

    static_prefix = '{}{}-'.format(
        StaticDataSourceConfiguration._datasource_id_prefix, self.existing_domain)
    for static_datasource in StaticDataSourceConfiguration.by_domain(self.existing_domain):
        table_id = static_datasource.get_id.replace(static_prefix, '')
        new_id = StaticDataSourceConfiguration.get_doc_id(self.new_domain, table_id)
        # check that new datasource is in new domain's list of static datasources
        StaticDataSourceConfiguration.by_id(new_id)
        datasource_map[static_datasource.get_id] = new_id
    return datasource_map
def _ucr(self):
    # Report the number, approximate row count, and on-disk size of every
    # static and dynamic UCR data source in the domain.
    static_datasources = StaticDataSourceConfiguration.by_domain(
        self.domain)
    dynamic_datasources = DataSourceConfiguration.by_domain(self.domain)
    self._print_value('Static UCR data sources', len(static_datasources))
    self._print_value('Dynamic UCR data sources', len(dynamic_datasources))

    def _get_count(config):
        # Approximate row count from the query planner (EXPLAIN) instead
        # of a full COUNT(*) scan.
        table_name = get_table_name(config.domain, config.table_id)
        db_name = connection_manager.get_django_db_alias(config.engine_id)
        query = ('SELECT * FROM "%s"' % table_name, [])
        try:
            return estimate_row_count(query, db_name)
        except ProgrammingError:
            # the UCR table may not have been built yet
            return "Table not found"

    def _get_table_size(config):
        # Total relation size (table + indexes + TOAST) in bytes.
        table_name = get_table_name(config.domain, config.table_id)
        db_name = connection_manager.get_django_db_alias(config.engine_id)
        db_cursor = connections[db_name].cursor()
        with db_cursor as cursor:
            try:
                cursor.execute(
                    "SELECT pg_total_relation_size('\"%s\"')" % table_name,
                    [])
                bytes = cursor.fetchone()[0]  # NOTE: shadows the builtin `bytes`
                return bytes
            except ProgrammingError:
                return "Table not found"

    # Sort by size; datasources whose table is missing sort as size 0.
    rows = sorted(
        [(datasource.display_name, _get_count(datasource),
          datasource.referenced_doc_type, _get_table_size(datasource))
         for datasource in static_datasources + dynamic_datasources],
        key=lambda r: r[-1] if r[-1] != 'Table not found' else 0)
    self.stdout.write('UCR datasource sizes')
    self._print_table([
        'Datasource name', 'Row count (approximate)', 'Doc type',
        'Size (bytes)'
    ], rows)
def _teardown_ucr_tables():
    """Drop every static UCR table belonging to the test domain."""
    with override_settings(SERVER_ENVIRONMENT=TEST_ENVIRONMENT):
        for config in StaticDataSourceConfiguration.by_domain(TEST_DOMAIN):
            get_indicator_adapter(config).drop_table()
def setUpModule():
    # Create the test-pna domain with its location and product fixtures,
    # then build all static UCR tables and load the CSV fixture data.
    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    domain = create_domain('test-pna')
    region_location_type = LocationType.objects.create(
        domain='test-pna',
        name='R\u00e9gion',
    )
    # Region-level locations referenced by fixture rows via location_id.
    SQLLocation.objects.create(domain='test-pna', name='Region Test',
                               location_id='8cde73411ddc4488a7f913c99499ead4',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='PASSY',
                               location_id='1991b4dfe166335e342f28134b85fcac',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r1',
                               location_id='0682630532ff25717176320482ff1028',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r2',
                               location_id='582c5d65a307baa7a38e7b5e651fd5fc',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r3',
                               location_id='bd0395ba4a4fbd38c90765bd04208a8f',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r4',
                               location_id='6ed1f958fccd1b8202e8e30851a2b326',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r5',
                               location_id='1991b4dfe166335e342f28134b85f516',
                               location_type=region_location_type)
    district_location_type = LocationType.objects.create(
        domain='test-pna',
        name='District',
    )
    SQLLocation.objects.create(domain='test-pna', name='District Test',
                               location_id='3db74fac2bad4e708e2b03800cc5ab73',
                               location_type=district_location_type)
    pps_location_type = LocationType.objects.create(
        domain='test-pna',
        name='PPS',
    )
    SQLLocation.objects.create(domain='test-pna', name='P2',
                               location_id='ccf4430f5c3f493797486d6ce1c39682',
                               location_type=pps_location_type)
    # Products referenced by fixture rows via product_id.
    SQLProduct.objects.create(domain='test-pna', name='Collier', code='product1',
                              product_id='product1')
    SQLProduct.objects.create(domain='test-pna', name='CU', code='product2',
                              product_id='product2')
    SQLProduct.objects.create(domain='test-pna', name='Depo-Provera', code='product3',
                              product_id='product3')
    SQLProduct.objects.create(domain='test-pna', name='DIU', code='product4',
                              product_id='product4')
    SQLProduct.objects.create(domain='test-pna', name='Jadelle', code='product5',
                              product_id='product5')
    SQLProduct.objects.create(domain='test-pna', name='Microgynon/Lof.', code='product6',
                              product_id='product6')
    SQLProduct.objects.create(domain='test-pna', name='Microlut/Ovrette', code='product7',
                              product_id='product7')
    SQLProduct.objects.create(domain='test-pna', name='Preservatif Feminin', code='product8',
                              product_id='product8')
    SQLProduct.objects.create(domain='test-pna', name='Preservatif Masculin', code='product9',
                              product_id='product9')
    SQLProduct.objects.create(domain='test-pna', name='Sayana Press', code='product10',
                              product_id='product10')
    SQLProduct.objects.create(domain='test-pna', name='IMPLANON', code='product11',
                              product_id='product11')
    SQLProduct.objects.create(domain='test-pna', name='Product 7', code='p7',
                              product_id='p7')

    with override_settings(SERVER_ENVIRONMENT='production'):
        configs = StaticDataSourceConfiguration.by_domain(domain.name)
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            adapter.build_table()

        engine = connection_manager.get_engine(UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                # fixture file name (minus ".csv") maps to a UCR table name
                table_name = get_table_name(domain.name, file_name[:-4])
                table = metadata.tables[table_name]
                postgres_copy.copy_from(f, table, engine, format='csv', null='', header=True)
    _call_center_domain_mock.stop()
def setUpModule():
    """Create the test-pna domain with its location/product fixtures, build
    the static UCR tables, and bulk-load the CSV fixture data."""
    if isinstance(Domain.get_db(), Mock):
        # needed to skip setUp for javascript tests thread on Travis
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    domain = create_domain('test-pna')
    region_location_type = LocationType.objects.create(
        domain='test-pna',
        name='R\u00e9gion',
    )
    # Region-level locations referenced by fixture rows via location_id.
    SQLLocation.objects.create(domain='test-pna', name='Region Test',
                               location_id='8cde73411ddc4488a7f913c99499ead4',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='PASSY',
                               location_id='1991b4dfe166335e342f28134b85fcac',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r1',
                               location_id='0682630532ff25717176320482ff0d6e',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r2',
                               location_id='0682630532ff25717176320482ff1028',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r3',
                               location_id='1991b4dfe166335e342f28134b85e7df',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r4',
                               location_id='1991b4dfe166335e342f28134b85f516',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r5',
                               location_id='3f720b4e733bea3cc401150231831e95',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r6',
                               location_id='582c5d65a307baa7a38e7b5e651fd5fc',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r7',
                               location_id='6ed1f958fccd1b8202e8e30851a2b326',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r8',
                               location_id='942e078b8dfa9551a9ff799301b0854d',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r9',
                               location_id='942e078b8dfa9551a9ff799301b08642',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r10',
                               location_id='942e078b8dfa9551a9ff799301b08682',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r11',
                               location_id='abb51a7f4ab64b70b899d86e54e62f51',
                               location_type=region_location_type)
    SQLLocation.objects.create(domain='test-pna', name='r12',
                               location_id='bd0395ba4a4fbd38c90765bd04208a8f',
                               location_type=region_location_type)
    district_location_type = LocationType.objects.create(
        domain='test-pna',
        name='District',
    )
    SQLLocation.objects.create(domain='test-pna', name='District Test',
                               location_id='3db74fac2bad4e708e2b03800cc5ab73',
                               location_type=district_location_type)
    pps_location_type = LocationType.objects.create(
        domain='test-pna',
        name='PPS',
    )
    SQLLocation.objects.create(domain='test-pna', name='P2',
                               location_id='ccf4430f5c3f493797486d6ce1c39682',
                               location_type=pps_location_type)
    SQLProduct.objects.create(domain='test-pna', name='Product 7', code='p7',
                              product_id='p7')

    with override_settings(SERVER_ENVIRONMENT='production'):
        configs = StaticDataSourceConfiguration.by_domain(domain.name)
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            adapter.build_table()

        engine = connection_manager.get_engine(UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                table_name = get_table_name(domain.name, file_name[:-4])
                table = metadata.tables[table_name]
                # Bug fix: the file is opened in text mode on Python 3, so
                # the copy options must be str — the old b'csv' / b''
                # bytes values were Python 2 leftovers.
                postgres_copy.copy_from(f, table, engine, format='csv', null='', header=True)
    _call_center_domain_mock.stop()
def setUpModule():
    """Set up the 'test-pna' domain: location hierarchy, the full family
    planning product catalogue, and the static UCR tables loaded from the
    CSV fixtures directory.
    """
    if isinstance(Domain.get_db(), Mock):
        # skip setUp for the javascript test thread on Travis
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()

    domain = create_domain('test-pna')

    region_location_type = LocationType.objects.create(
        domain='test-pna',
        name='R\u00e9gion',
    )
    for loc_name, loc_id in (
        ('Region Test', '8cde73411ddc4488a7f913c99499ead4'),
        ('PASSY', '1991b4dfe166335e342f28134b85fcac'),
        ('r1', '0682630532ff25717176320482ff1028'),
        ('r2', '582c5d65a307baa7a38e7b5e651fd5fc'),
        ('r3', 'bd0395ba4a4fbd38c90765bd04208a8f'),
        ('r4', '6ed1f958fccd1b8202e8e30851a2b326'),
        ('r5', '1991b4dfe166335e342f28134b85f516'),
    ):
        SQLLocation.objects.create(
            domain='test-pna',
            name=loc_name,
            location_id=loc_id,
            location_type=region_location_type,
        )

    district_location_type = LocationType.objects.create(
        domain='test-pna',
        name='District',
    )
    SQLLocation.objects.create(
        domain='test-pna',
        name='District Test',
        location_id='3db74fac2bad4e708e2b03800cc5ab73',
        location_type=district_location_type,
    )

    pps_location_type = LocationType.objects.create(
        domain='test-pna',
        name='PPS',
    )
    SQLLocation.objects.create(
        domain='test-pna',
        name='P2',
        location_id='ccf4430f5c3f493797486d6ce1c39682',
        location_type=pps_location_type,
    )

    # every product's code doubles as its product_id
    for product_name, product_code in (
        ('Collier', 'product1'),
        ('CU', 'product2'),
        ('Depo-Provera', 'product3'),
        ('DIU', 'product4'),
        ('Jadelle', 'product5'),
        ('Microgynon/Lof.', 'product6'),
        ('Microlut/Ovrette', 'product7'),
        ('Preservatif Feminin', 'product8'),
        ('Preservatif Masculin', 'product9'),
        ('Sayana Press', 'product10'),
        ('IMPLANON', 'product11'),
        ('Product 7', 'p7'),
    ):
        SQLProduct.objects.create(
            domain='test-pna',
            name=product_name,
            code=product_code,
            product_id=product_code,
        )

    with override_settings(SERVER_ENVIRONMENT='production'):
        configs = StaticDataSourceConfiguration.by_domain(domain.name)
        for config in configs:
            get_indicator_adapter(config).build_table()

        engine = connection_manager.get_engine(UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        fixture_dir = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(fixture_dir):
            with open(os.path.join(fixture_dir, file_name), encoding='utf-8') as f:
                # fixture name minus '.csv' yields the UCR table id
                table = metadata.tables[get_table_name(domain.name, file_name[:-4])]
                postgres_copy.copy_from(
                    f, table, engine,
                    format='csv' if six.PY3 else b'csv',
                    null='' if six.PY3 else b'',
                    header=True
                )
    _call_center_domain_mock.stop()
def by_domain(self, domain):
    """Return the static data source configurations for *domain*.

    Thin delegate to ``StaticDataSourceConfiguration.by_domain``.
    """
    configs = StaticDataSourceConfiguration.by_domain(domain)
    return configs
def setUpModule():
    """Set up the 'icds-cas' domain (block/state/awc locations), build and
    load the static UCR tables from fixtures, run one aggregation pass, and
    create the reporting views.

    Skipped entirely when ``settings.USE_PARTITIONED_DATABASE`` is set,
    since the fixtures assume a single (non-partitioned) database.
    """
    if settings.USE_PARTITIONED_DATABASE:
        print(
            '============= WARNING: not running test setup because settings.USE_PARTITIONED_DATABASE is True.'
        )
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()

    domain = create_domain('icds-cas')
    location_type = LocationType.objects.create(
        domain=domain.name,
        name='block',
    )
    SQLLocation.objects.create(
        domain=domain.name, name='b1', location_id='b1',
        location_type=location_type
    )
    state_location_type = LocationType.objects.create(
        domain=domain.name,
        name='state',
    )
    SQLLocation.objects.create(
        domain=domain.name, name='st1', location_id='st1',
        location_type=state_location_type
    )
    awc_location_type = LocationType.objects.create(
        domain=domain.name,
        name='awc',
    )
    SQLLocation.objects.create(
        domain=domain.name, name='a7', location_id='a7',
        location_type=awc_location_type
    )

    with override_settings(SERVER_ENVIRONMENT='icds-new'):
        configs = StaticDataSourceConfiguration.by_domain('icds-cas')
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            try:
                # best effort: the table may not exist yet
                adapter.drop_table()
            except Exception:
                pass
            adapter.build_table()

        engine = connection_manager.get_engine(ICDS_UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                table_name = FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]
                table = metadata.tables[table_name]
                # dashboard tables are filled by the aggregation step below,
                # not from fixtures
                if not table_name.startswith('icds_dashboard_'):
                    # BUG FIX: pass str COPY options; the bytes values
                    # (b'csv', b'') were a Python 2 leftover and this module
                    # is already Python 3-only (open(..., encoding=...)).
                    postgres_copy.copy_from(
                        f, table, engine, format='csv', null='', header=True
                    )
        _aggregate_child_health_pnc_forms('st1', datetime(2017, 3, 31))
        try:
            move_ucr_data_into_aggregation_tables(datetime(2017, 5, 28), intervals=2)
        except AssertionError as e:
            # we always use soft assert to email when the aggregation has
            # completed; only other assertion messages are real failures
            if "Aggregation completed" not in str(e):
                print(e)
                tearDownModule()
                raise
        except Exception as e:
            print(e)
            tearDownModule()
            raise
        finally:
            _call_center_domain_mock.stop()

        with connections['icds-ucr'].cursor() as cursor:
            create_views(cursor)
def setUpModule():
    """Build the 'icds-cas' test domain (block, seven states, one awc),
    load the UCR fixture CSVs, and run the dashboard aggregation pipeline
    once so the report tests have data to query.
    """
    if settings.USE_PARTITIONED_DATABASE:
        print('============= WARNING: not running test setup because settings.USE_PARTITIONED_DATABASE is True.')
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()

    domain = create_domain('icds-cas')

    block_type = LocationType.objects.create(domain=domain.name, name='block')
    SQLLocation.objects.create(
        domain=domain.name, name='b1', location_id='b1', location_type=block_type
    )

    state_type = LocationType.objects.create(domain=domain.name, name='state')
    for state in ('st1', 'st2', 'st3', 'st4', 'st5', 'st6', 'st7'):
        SQLLocation.objects.create(
            domain=domain.name, name=state, location_id=state,
            location_type=state_type
        )

    awc_type = LocationType.objects.create(domain=domain.name, name='awc')
    SQLLocation.objects.create(
        domain=domain.name, name='a7', location_id='a7', location_type=awc_type
    )

    with override_settings(SERVER_ENVIRONMENT='icds'):
        for config in StaticDataSourceConfiguration.by_domain('icds-cas'):
            adapter = get_indicator_adapter(config)
            try:
                # table may be left over from a previous run
                adapter.drop_table()
            except Exception:
                pass
            adapter.build_table()

        engine = connection_manager.get_engine(ICDS_UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        fixture_dir = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(fixture_dir):
            with open(os.path.join(fixture_dir, file_name), encoding='utf-8') as f:
                table_name = FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]
                table = metadata.tables[table_name]
                # dashboard tables are populated by aggregation, not fixtures
                if table_name.startswith('icds_dashboard_'):
                    continue
                # consume the CSV header line; quote each column name so
                # case is preserved in the COPY statement
                columns = [
                    '"{}"'.format(c.strip())
                    for c in f.readline().split(',')
                ]
                postgres_copy.copy_from(
                    f, table, engine,
                    format='csv' if six.PY3 else b'csv',
                    null='' if six.PY3 else b'',
                    columns=columns
                )

        for state_id in ('st1', 'st2'):
            _aggregate_child_health_pnc_forms(state_id, datetime(2017, 3, 31))
            _aggregate_gm_forms(state_id, datetime(2017, 3, 31))
            _aggregate_bp_forms(state_id, datetime(2017, 3, 31))

        try:
            move_ucr_data_into_aggregation_tables(datetime(2017, 5, 28), intervals=2)
            build_incentive_report(agg_date=datetime(2017, 5, 28))
        except Exception as e:
            print(e)
            tearDownModule()
            raise
        finally:
            _call_center_domain_mock.stop()
def setUpModule():
    """Bootstrap the 'icds-cas' domain with a full location hierarchy
    (state > supervisor > block, plus an awc), load the UCR fixtures,
    distribute tables for citus, and run the aggregation pipeline.
    """
    if settings.USE_PARTITIONED_DATABASE:
        print('============= WARNING: not running test setup because settings.USE_PARTITIONED_DATABASE is True.')
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()
    # _use_citus.enable()
    domain = create_domain('icds-cas')

    # start from a clean slate: drop locations left behind by other modules
    SQLLocation.objects.all().delete()
    LocationType.objects.all().delete()

    state_type = LocationType.objects.create(domain=domain.name, name='state')
    st1 = SQLLocation.objects.create(
        domain=domain.name, name='st1', location_id='st1', location_type=state_type
    )
    for state in ('st2', 'st3', 'st4', 'st5', 'st6', 'st7'):
        SQLLocation.objects.create(
            domain=domain.name, name=state, location_id=state,
            location_type=state_type
        )

    supervisor_type = LocationType.objects.create(domain=domain.name, name='supervisor')
    s1 = SQLLocation.objects.create(
        domain=domain.name, name='s1', location_id='s1',
        location_type=supervisor_type, parent=st1
    )

    block_type = LocationType.objects.create(domain=domain.name, name='block')
    SQLLocation.objects.create(
        domain=domain.name, name='b1', location_id='b1',
        location_type=block_type, parent=s1
    )

    awc_type = LocationType.objects.create(domain=domain.name, name='awc')
    SQLLocation.objects.create(
        domain=domain.name, name='a7', location_id='a7', location_type=awc_type
    )

    with override_settings(SERVER_ENVIRONMENT='icds'):
        for config in StaticDataSourceConfiguration.by_domain('icds-cas'):
            adapter = get_indicator_adapter(config)
            try:
                # table may be left over from a previous run
                adapter.drop_table()
            except Exception:
                pass
            adapter.build_table()

        engine = connection_manager.get_engine(ICDS_UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        fixture_dir = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(fixture_dir):
            with open(os.path.join(fixture_dir, file_name), encoding='utf-8') as f:
                table_name = FILE_NAME_TO_TABLE_MAPPING[file_name[:-4]]
                table = metadata.tables[table_name]
                # dashboard tables are populated by aggregation, not fixtures
                if table_name.startswith('icds_dashboard_'):
                    continue
                # consume the CSV header line; quote each column name so
                # case is preserved in the COPY statement
                columns = [
                    '"{}"'.format(c.strip())
                    for c in f.readline().split(',')
                ]
                postgres_copy.copy_from(
                    f, table, engine,
                    format='csv' if six.PY3 else b'csv',
                    null='' if six.PY3 else b'',
                    columns=columns
                )

        _distribute_tables_for_citus(engine)

        for state_id in ('st1', 'st2'):
            _aggregate_child_health_pnc_forms(state_id, datetime(2017, 3, 31))
            _aggregate_gm_forms(state_id, datetime(2017, 3, 31))
            _aggregate_bp_forms(state_id, datetime(2017, 3, 31))

        try:
            move_ucr_data_into_aggregation_tables(datetime(2017, 5, 28), intervals=2)
            build_incentive_report(agg_date=datetime(2017, 5, 28))
        except Exception as e:
            print(e)
            tearDownModule()
            raise
        finally:
            _call_center_domain_mock.stop()
def _teardown_ucr_tables():
    """Drop every static UCR table belonging to TEST_DOMAIN, evaluated
    under the TEST_ENVIRONMENT server environment."""
    with override_settings(SERVER_ENVIRONMENT=TEST_ENVIRONMENT):
        for config in StaticDataSourceConfiguration.by_domain(TEST_DOMAIN):
            get_indicator_adapter(config).drop_table()
def setUpModule():
    """Create the 'test-pna' domain with one region, district, and PPS
    location, then build the static UCR tables and load them from the CSV
    fixtures directory.
    """
    if isinstance(Domain.get_db(), Mock):
        # needed to skip setUp for javascript tests thread on Travis
        return

    _call_center_domain_mock = mock.patch(
        'corehq.apps.callcenter.data_source.call_center_data_source_configuration_provider'
    )
    _call_center_domain_mock.start()

    domain = create_domain('test-pna')

    region_location_type = LocationType.objects.create(
        domain='test-pna',
        name='R\u00e9gion',
    )
    SQLLocation.objects.create(
        domain='test-pna', name='Region Test',
        location_id='8cde73411ddc4488a7f913c99499ead4',
        location_type=region_location_type
    )
    district_location_type = LocationType.objects.create(
        domain='test-pna',
        name='District',
    )
    SQLLocation.objects.create(
        domain='test-pna', name='District Test',
        location_id='3db74fac2bad4e708e2b03800cc5ab73',
        location_type=district_location_type
    )
    pps_location_type = LocationType.objects.create(
        domain='test-pna',
        name='PPS',
    )
    SQLLocation.objects.create(
        domain='test-pna', name='P2',
        location_id='ccf4430f5c3f493797486d6ce1c39682',
        location_type=pps_location_type
    )

    with override_settings(SERVER_ENVIRONMENT='production'):
        configs = StaticDataSourceConfiguration.by_domain(domain.name)
        adapters = [get_indicator_adapter(config) for config in configs]
        for adapter in adapters:
            adapter.build_table()

        engine = connection_manager.get_engine(UCR_ENGINE_ID)
        metadata = sqlalchemy.MetaData(bind=engine)
        metadata.reflect(bind=engine, extend_existing=True)
        path = os.path.join(os.path.dirname(__file__), 'fixtures')
        for file_name in os.listdir(path):
            with open(os.path.join(path, file_name), encoding='utf-8') as f:
                # fixture file name minus '.csv' maps to the UCR table id
                table_name = get_table_name(domain.name, file_name[:-4])
                table = metadata.tables[table_name]
                # BUG FIX: pass str COPY options; the bytes values
                # (b'csv', b'') were a Python 2 leftover and this module is
                # already Python 3-only (open(..., encoding=...)).
                postgres_copy.copy_from(
                    f, table, engine, format='csv', null='', header=True
                )
    _call_center_domain_mock.stop()