Example #1
    def setUpClass(cls):
        cls.db2_name = 'cchq_ucr_tests'
        db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        # setup patches
        cls.connection_string_patch = patch('corehq.sql_db.connections.connection_manager.get_connection_string')

        def connection_string_for_engine(engine_id):
            if engine_id == 'engine-1':
                return settings.SQL_REPORTING_DATABASE_URL
            else:
                return cls.db2_url

        mock_manager = cls.connection_string_patch.start()
        mock_manager.side_effect = connection_string_for_engine

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        cls.db_context = temporary_database(cls.db2_name)
        cls.db_context.__enter__()

        cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
        cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
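Examples #1 and #2 lean on a `temporary_database` helper. A minimal sketch of what such a context manager could look like, assuming SQLAlchemy 1.x and the same "issue an explicit commit so CREATE DATABASE runs outside a transaction" trick used in examples #3 and #4 below (the real corehq helper may differ):

from contextlib import contextmanager

from django.conf import settings
from sqlalchemy import create_engine


@contextmanager
def temporary_database(database_name):
    """Create a throwaway database for the duration of the block, then drop it."""
    root_engine = create_engine(settings.SQL_REPORTING_DATABASE_URL)
    conn = root_engine.connect()
    conn.execute('commit')  # end the implicit transaction so CREATE DATABASE is allowed
    conn.execute('CREATE DATABASE {}'.format(database_name))
    conn.close()
    try:
        yield
    finally:
        conn = root_engine.connect()
        conn.execute('commit')
        conn.execute('DROP DATABASE {}'.format(database_name))
        conn.close()
        root_engine.dispose()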
Example #2
    def setUpClass(cls):
        super(UCRMultiDBTest, cls).setUpClass()
        cls.db2_name = 'cchq_ucr_tests'
        default_db_url = connections.connection_manager.get_connection_string(DEFAULT_DB_ALIAS)
        db_conn_parts = default_db_url.split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        cls.context_managers = ExitStack()
        cls.context_managers.enter_context(connections.override_engine('engine-1', default_db_url, 'default'))
        cls.context_managers.enter_context(connections.override_engine('engine-2', cls.db2_url, cls.db2_name))

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        cls.context_managers.enter_context(temporary_database(cls.db2_name))

        cls.ds1_adapter = get_indicator_adapter(cls.ds_1)
        cls.ds2_adapter = get_indicator_adapter(cls.ds_2)
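Because example #2 registers every patch and the temporary database on an ExitStack, the matching tearDownClass only needs to close the stack. A hypothetical counterpart, not shown on this page:

    @classmethod
    def tearDownClass(cls):
        # unwinds the override_engine patches and the temporary database
        # in reverse order of registration
        cls.context_managers.close()
        super(UCRMultiDBTest, cls).tearDownClass()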
Example #3
    def setUpClass(cls):
        cls.db2_name = 'cchq_ucr_tests'
        db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        # setup patches
        cls.engine_id_patches = (
            # unfortunately we need to patch this directly in modules that import it as well
            patch('corehq.apps.userreports.sql.connection.get_engine_id'),
            patch('corehq.apps.userreports.sql.adapter.get_engine_id'),
            patch('corehq.apps.userreports.reports.data_source.get_engine_id'),
        )
        cls.connection_string_patch = patch(
            'corehq.db.connection_manager.get_connection_string')
        for engine_id_patch in cls.engine_id_patches:
            mock_engine_id_method = engine_id_patch.start()
            mock_engine_id_method.side_effect = lambda x: x.engine_id

        def connection_string_for_engine(engine_id):
            if engine_id == 'engine-1':
                return settings.SQL_REPORTING_DATABASE_URL
            else:
                return cls.db2_url

        mock_manager = cls.connection_string_patch.start()
        mock_manager.side_effect = connection_string_for_engine

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        # use db1 engine to create db2 http://stackoverflow.com/a/8977109/8207
        cls.root_engine = create_engine(settings.SQL_REPORTING_DATABASE_URL)
        conn = cls.root_engine.connect()
        conn.execute('commit')
        try:
            conn.execute('CREATE DATABASE {}'.format(cls.db2_name))
        except ProgrammingError:
            # optimistically assume it failed because the database was already created.
            pass
        conn.close()

        cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
        cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
Example #4
    def setUpClass(cls):
        cls.db2_name = 'cchq_ucr_tests'
        db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        # setup patches
        cls.engine_id_patches = (
            # unfortunately we need to patch this directly in modules that import it as well
            patch('corehq.apps.userreports.sql.connection.get_engine_id'),
            patch('corehq.apps.userreports.sql.adapter.get_engine_id'),
            patch('corehq.apps.userreports.reports.data_source.get_engine_id'),
        )
        cls.connection_string_patch = patch('corehq.db.connection_manager.get_connection_string')
        for engine_id_patch in cls.engine_id_patches:
            mock_engine_id_method = engine_id_patch.start()
            mock_engine_id_method.side_effect = lambda x: x.engine_id

        def connection_string_for_engine(engine_id):
            if engine_id == 'engine-1':
                return settings.SQL_REPORTING_DATABASE_URL
            else:
                return cls.db2_url

        mock_manager = cls.connection_string_patch.start()
        mock_manager.side_effect = connection_string_for_engine

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        # use db1 engine to create db2 http://stackoverflow.com/a/8977109/8207
        cls.root_engine = create_engine(settings.SQL_REPORTING_DATABASE_URL)
        conn = cls.root_engine.connect()
        conn.execute('commit')
        try:
            conn.execute('CREATE DATABASE {}'.format(cls.db2_name))
        except ProgrammingError:
            # optimistically assume it failed because the database was already created.
            pass
        conn.close()

        cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
        cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
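Examples #3 and #4 create the second database by hand, so their cleanup presumably has to stop each patch and drop that database again. A hypothetical tearDownClass along those lines (in practice the adapters' engines would also need to be disposed before the DROP can succeed):

    @classmethod
    def tearDownClass(cls):
        # undo the mock patches started in setUpClass
        for engine_id_patch in cls.engine_id_patches:
            engine_id_patch.stop()
        cls.connection_string_patch.stop()

        # drop db2 the same way it was created: commit first so the
        # DROP DATABASE statement runs outside a transaction
        conn = cls.root_engine.connect()
        conn.execute('commit')
        conn.execute('DROP DATABASE {}'.format(cls.db2_name))
        conn.close()
        cls.root_engine.dispose()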
Example #5
def _update_linked_datasource(master_datasource, linked_datasource):
    master_datasource_json = master_datasource.to_json()
    linked_datasource_json = linked_datasource.to_json()

    master_datasource_json["domain"] = linked_datasource_json["domain"]
    master_datasource_json["_id"] = linked_datasource_json["_id"]
    master_datasource_json["_rev"] = linked_datasource_json["_rev"]
    master_datasource_json["meta"]["master_id"] = linked_datasource_json["meta"]["master_id"]

    _replace_master_app_ids(linked_datasource_json["domain"], master_datasource_json)

    linked_datasource_json.update(master_datasource_json)
    DataSourceConfiguration.wrap(linked_datasource_json).save()

    rebuild_indicators.delay(linked_datasource.get_id, source=f"Datasource link: {linked_datasource.get_id}")
Example #6
    def _create_data_source_config(self, indicators=None):
        default_indicator = [{
            "type": "expression",
            "column_id": "laugh_sound",
            "datatype": "string",
            "expression": {
                'type': 'named',
                'name': 'laugh_sound'
            }
        }]

        return DataSourceConfiguration.wrap({
            'display_name': 'Mother Indicators',
            'doc_type': 'DataSourceConfiguration',
            'domain': 'test',
            'referenced_doc_type': 'CommCareCase',
            'table_id': 'mother_indicators',
            'configured_filter': {},
            'configured_indicators': indicators or default_indicator
        })
Example #7
def _get_or_create_datasource_link(domain_link, datasource):
    domain_datasources = get_datasources_for_domain(domain_link.linked_domain)
    existing_linked_datasources = [d for d in domain_datasources if d.meta.master_id == datasource.get_id]
    if existing_linked_datasources:
        return existing_linked_datasources[0]

    datasource_json = datasource.to_json()
    datasource_json["domain"] = domain_link.linked_domain
    datasource_json["_id"] = None
    datasource_json["_rev"] = None

    # app_id is needed to edit a report in report builder, but linked
    # reports can't be edited, so we can ignore this
    datasource_json["meta"]["build"]["app_id"] = None

    datasource_json["meta"]["master_id"] = datasource.get_id

    _replace_master_app_ids(domain_link.linked_domain, datasource_json)

    new_datasource = DataSourceConfiguration.wrap(datasource_json)
    new_datasource.save()

    rebuild_indicators.delay(new_datasource.get_id, source=f"Datasource link: {new_datasource.get_id}")

    return new_datasource
Example #8
def _get_old_new_tablenames(engine_id=None):
    by_engine_id = defaultdict(list)
    seen_tables = defaultdict(set)
    for ds in StaticDataSourceConfiguration.all():
        ds_engine_id = ds['engine_id']
        if engine_id and ds_engine_id != engine_id:
            continue
        old, new = _table_names(ds.domain, ds.table_id)
        if old in seen_tables[ds_engine_id]:
            logger.warning('Duplicate table: %s - %s', ds.get_id, old)
            continue
        seen_tables[ds_engine_id].add(old)
        by_engine_id[ds_engine_id].append(DSConf(old, new, ds))

    data_source_ids = [
        row['id'] for row in DataSourceConfiguration.view(
            'userreports/active_data_sources',
            reduce=False,
            include_docs=False)
    ]
    for ds in iter_docs(DataSourceConfiguration.get_db(), data_source_ids):
        ds = DataSourceConfiguration.wrap(ds)
        ds_engine_id = ds['engine_id']
        if engine_id and ds_engine_id != engine_id:
            continue

        old, new = _table_names(ds.domain, ds.table_id)
        if old in seen_tables[ds_engine_id]:
            logger.warning('Duplicate table: %s - %s', ds.get_id, old)
            continue
        seen_tables[ds_engine_id].add(old)
        by_engine_id[ds_engine_id].append(DSConf(old, new, ds))

    return by_engine_id
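`_get_old_new_tablenames` groups `DSConf(old, new, ds)` entries by engine id. A small hypothetical consumer that unpacks them positionally (the field names of `DSConf` are not shown here):

for engine_id, confs in _get_old_new_tablenames().items():
    for old_name, new_name, ds in confs:
        # e.g. log the planned rename for each data source on this engine
        logger.info('%s: %s -> %s (%s)', engine_id, old_name, new_name, ds.get_id)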
Example #9
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
         'named_filters': {
             'pregnant': {
                 'type': 'property_match',
                 'property_name': 'mother_state',
                 'property_value': 'pregnant',
             }
         },
         'configured_filter': {
             'type': 'and',
             'filters': [
                 {
                     'property_name': 'type',
                     'property_value': 'ttc_mother',
                     'type': 'property_match',
                 },
                 {
                     'type': 'named',
                     'name': 'pregnant',
                 }
             ]
         }
     })
Example #10
def _get_or_create_datasource_link(domain_link, datasource, app_id):
    domain_datasources = get_datasources_for_domain(domain_link.linked_domain)
    existing_linked_datasources = [d for d in domain_datasources if d.meta.master_id == datasource.get_id]
    if existing_linked_datasources:
        return existing_linked_datasources[0]

    datasource_json = datasource.to_json()
    datasource_json["domain"] = domain_link.linked_domain
    datasource_json["_id"] = None
    datasource_json["_rev"] = None

    # app_id is needed to edit reports, which is not possible in a linked project due to master_id;
    # keeping it ensures that if the link is removed, the downstream report becomes editable
    datasource_json["meta"]["build"]["app_id"] = app_id

    datasource_json["meta"]["master_id"] = datasource.get_id

    _replace_master_app_ids(domain_link.linked_domain, datasource_json)

    new_datasource = DataSourceConfiguration.wrap(datasource_json)
    new_datasource.save()

    rebuild_indicators.delay(
        new_datasource.get_id,
        source=f"Datasource link: {new_datasource.get_id}",
        domain=new_datasource.domain
    )

    return new_datasource
Example #11
def _make_data_source_for_domain(data_source_json, domain_name):
    from corehq.apps.userreports.models import StaticDataSourceConfiguration
    from corehq.apps.userreports.models import DataSourceConfiguration

    doc = deepcopy(data_source_json)
    doc['domain'] = domain_name
    doc['_id'] = StaticDataSourceConfiguration.get_doc_id(domain_name, doc['table_id'])
    return DataSourceConfiguration.wrap(doc)
Example #12
 def test_no_self_lookups(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.named_expressions['broken'] = {
         "type": "named",
         "name": "broken",
     }
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #13
def _make_data_source_for_domain(data_source_json, domain_name):
    from corehq.apps.userreports.models import StaticDataSourceConfiguration
    from corehq.apps.userreports.models import DataSourceConfiguration

    doc = deepcopy(data_source_json)
    doc['domain'] = domain_name
    doc['_id'] = StaticDataSourceConfiguration.get_doc_id(domain_name, doc['table_id'])
    return DataSourceConfiguration.wrap(doc)
Example #14
 def test_no_self_lookups(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.named_expressions['broken'] = {
         "type": "named",
         "name": "broken",
     }
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #15
 def test_missing_no_named_in_named(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.named_expressions['broken'] = {
         "type": "named",
         "name": "pregnant",
     }
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #16
def get_ucr_config(domain_link, report_config_id):
    from corehq.apps.userreports.models import DataSourceConfiguration, ReportConfiguration
    url = reverse('linked_domain:ucr_config', args=[domain_link.master_domain,
                                                    report_config_id])
    response = _do_request_to_remote_hq_json(url, domain_link.remote_details, domain_link.linked_domain)
    return {
        "report": ReportConfiguration.wrap(response["report"]),
        "datasource": DataSourceConfiguration.wrap(response["datasource"]),
    }
Example #17
 def test_missing_no_named_in_named(self):
     bad_config = DataSourceConfiguration.wrap(
         self.indicator_configuration.to_json())
     bad_config.named_expressions['broken'] = {
         "type": "named",
         "name": "pregnant",
     }
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #18
    def setUpClass(cls):
        super(TestTestCase2B, cls).setUpClass()

        test_data_source_file = os.path.join(
            os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         os.pardir)), 'data_sources',
            TEST_DATA_SOURCE)

        with open(test_data_source_file) as f:
            cls.test_data_source = DataSourceConfiguration.wrap(
                json.loads(f.read())['config'])
            cls.named_expressions = cls.test_data_source.named_expression_objects
Example #19
    def setUp(self):
        folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
        sample_file = os.path.join(folder, 'sample_indicator_config.json')
        self.pillow = ConfigurableIndicatorPillow()
        self.engine = self.pillow.get_sql_engine()
        with open(sample_file) as f:
            structure = json.loads(f.read())
            self.config = DataSourceConfiguration.wrap(structure)
            self.pillow.bootstrap(configs=[self.config])

        self.adapter = IndicatorSqlAdapter(self.engine, self.config)
        self.adapter.rebuild_table()
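Since example #19 rebuilds the indicator table in setUp, the matching tearDown would typically drop it again. A minimal sketch, assuming the adapter exposes a drop_table() counterpart to rebuild_table():

    def tearDown(self):
        # remove the table created by rebuild_table() in setUp
        self.adapter.drop_table()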
Example #20
    def setUpClass(cls):
        super(TestDataSourceExpressions, cls).setUpClass()

        data_source_file = os.path.join(
            os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)),
            'ucr_data_sources',
            cls.data_source_name
        )

        with open(data_source_file, encoding='utf-8') as f:
            cls.data_source = DataSourceConfiguration.wrap(json.loads(f.read())['config'])
            cls.named_expressions = cls.data_source.named_expression_objects
Example #21
    def setUpClass(cls):
        super(TestDataSourceExpressions, cls).setUpClass()

        data_source_file = os.path.join(
            os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         os.pardir)), 'data_sources',
            cls.data_source_name)

        with open(data_source_file) as f:
            cls.data_source = DataSourceConfiguration.wrap(
                json.loads(f.read())['config'])
            cls.named_expressions = cls.data_source.named_expression_objects
Example #22
    def setUpClass(cls):
        super(TestEpisode2B, cls).setUpClass()

        episode_file = os.path.join(
            os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)),
            'data_sources',
            EPISODE_DATA_SOURCE
        )

        with open(episode_file) as f:
            cls.episode = DataSourceConfiguration.wrap(json.loads(f.read())['config'])
            cls.named_expressions = cls.episode.named_expression_objects
Example #23
    def setUpClass(cls):
        super(UpNrhmTestCase, cls).setUpClass()

        data_source_file = os.path.join(
            os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)),
            'data_sources',
            cls.data_source_name
        )

        with open(data_source_file, encoding='utf-8') as f:
            cls.data_source = DataSourceConfiguration.wrap(json.loads(f.read())['config'])
            cls.named_expressions = cls.data_source.named_expression_objects
            cls.base_item_expression = cls.data_source.base_item_expression
Example #24
    def test_get_filtered_configs_es_error(self):
        table_manager = ConfigurableReportTableManagerMixin(
            MockDataSourceProvider(), filter_missing_domains=True)
        ds1 = get_sample_data_source()
        ds1.domain = 'domain1'
        ds2 = DataSourceConfiguration.wrap(ds1.to_json())
        ds2.domain = 'domain2'

        with patch('corehq.apps.es.es_query.run_query') as run_query:
            run_query.side_effect = ESError
            filtered_configs = table_manager.get_filtered_configs([ds1, ds2])

        self.assertEqual(filtered_configs, [ds1, ds2])
Example #25
 def test_missing_reference(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.configured_indicators.append({
         "type": "expression",
         "column_id": "missing",
         "datatype": "string",
         "expression": {
             'type': 'named',
             'name': 'missing'
         }
     })
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #26
 def test_missing_reference(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.configured_indicators.append({
         "type": "expression",
         "column_id": "missing",
         "datatype": "string",
         "expression": {
             'type': 'named',
             'name': 'missing'
         }
     })
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #27
 def test_configured_filter_auto_date_convert(self):
     source = self.config.to_json()
     source["configured_filter"] = {
         "expression": {
             "datatype": "date",
             "expression": {"datatype": "date", "property_name": "visit_date", "type": "property_name"},
             "type": "root_doc",
         },
         "operator": "gt",
         "property_value": "2015-05-05",
         "type": "boolean_expression",
     }
     config = DataSourceConfiguration.wrap(source)
     config.validate()
Example #28
    def setUpClass(cls):
        super(UCRMultiDBTest, cls).setUpClass()
        cls.db2_name = 'cchq_ucr_tests'
        db_conn_parts = connections.connection_manager.get_connection_string('default').split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        cls.context_manager = connections.override_engine('engine-2', cls.db2_url, cls.db2_name)
        cls.context_manager.__enter__()

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        cls.db_context = temporary_database(cls.db2_name)
        cls.db_context.__enter__()

        cls.ds1_adapter = get_indicator_adapter(cls.ds_1)
        cls.ds2_adapter = get_indicator_adapter(cls.ds_2)
Example #29
 def test_configured_filter_auto_date_convert(self):
     source = self.config.to_json()
     source['configured_filter'] = {
         "expression": {
             "datatype": "date",
             "expression": {
                 "datatype": "date",
                 "property_name": "visit_date",
                 "type": "property_name"
             },
             "type": "root_doc"
         },
         "operator": "gt",
         "property_value": "2015-05-05",
         "type": "boolean_expression"
     }
     config = DataSourceConfiguration.wrap(source)
     config.validate()
Example #30
 def setUpClass(cls) -> None:
     AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
         'domain_nopermission', [])
     AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
         'domain_baseitem', ['base_item_expression'])
     AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
         'domain_related_doc', ['related_doc'])
     AllowedUCRExpressionSettings.save_allowed_ucr_expressions(
         'domain_both', ['related_doc', 'base_item_expression'])
     cls.config = get_sample_data_source()
     cls.config = cls.config.to_json()
     cls.config['configured_indicators'].append({
         "type":
         "expression",
         "is_primary_key":
         False,
         "is_nullable":
         True,
         "datatype":
         "string",
         "expression": {
             "value_expression": {
                 "datatype": None,
                 "type": "property_name",
                 "property_name": "name"
             },
             "type": "related_doc",
             "related_doc_type": "Location",
             "doc_id_expression": {
                 "datatype": None,
                 "type": "property_name",
                 "property_name": "health_post_id"
             }
         },
         "column_id":
         "health_post_name"
     })
     cls.config['base_item_expression'] = {
         "datatype": None,
         "property_name": "actions",
         "type": "property_name"
     }
     cls.config = DataSourceConfiguration.wrap(cls.config)
     return super().setUpClass()
Example #31
    def test_mirroring(self):
        ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
        ds3.engine_id = "default"
        ds3.mirrored_engine_ids = ['engine-2']
        adapter = get_indicator_adapter(ds3)
        self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
        self.assertEqual(len(adapter.all_adapters), 2)
        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(0, session.query(db_adapter.get_table()).count())

        with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
            pillow = get_case_pillow(ucr_configs=[ds3])
        sample_doc, _ = get_sample_doc_and_indicators()
        pillow.process_change(doc_to_change(sample_doc))

        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(1, session.query(db_adapter.get_table()).count())
Example #32
    def test_mirroring(self):
        ds3 = DataSourceConfiguration.wrap(get_sample_data_source().to_json())
        ds3.engine_id = DEFAULT_ENGINE_ID
        ds3.mirrored_engine_ids = ['engine-2']
        adapter = get_indicator_adapter(ds3)
        self.assertEqual(type(adapter.adapter), MultiDBSqlAdapter)
        self.assertEqual(len(adapter.all_adapters), 2)
        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(0, session.query(db_adapter.get_table()).count())

        with patch('pillowtop.models.KafkaCheckpoint.get_or_create_for_checkpoint_id'):
            pillow = get_case_pillow(ucr_configs=[ds3])
        sample_doc, _ = get_sample_doc_and_indicators()
        pillow.process_change(doc_to_change(sample_doc))

        for db_adapter in adapter.all_adapters:
            with db_adapter.session_context() as session:
                self.assertEqual(1, session.query(db_adapter.get_table()).count())
Example #33
    def _create_data_source_config(self, indicators=None):
        default_indicator = [{
            "type": "expression",
            "column_id": "laugh_sound",
            "datatype": "string",
            "expression": {
                'type': 'named',
                'name': 'laugh_sound'
            }
        }]

        return DataSourceConfiguration.wrap({
            'display_name': 'Mother Indicators',
            'doc_type': 'DataSourceConfiguration',
            'domain': 'test',
            'referenced_doc_type': 'CommCareCase',
            'table_id': 'mother_indicators',
            'configured_filter': {},
            'configured_indicators': indicators or default_indicator
        })
Example #34
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap(
         {
             "display_name": "Mother Indicators",
             "doc_type": "DataSourceConfiguration",
             "domain": "test",
             "referenced_doc_type": "CommCareCase",
             "table_id": "mother_indicators",
             "named_filters": {
                 "pregnant": {
                     "type": "property_match",
                     "property_name": "mother_state",
                     "property_value": "pregnant",
                 },
                 "evil": {"type": "property_match", "property_name": "evil", "property_value": "yes"},
             },
             "configured_filter": {
                 "type": "and",
                 "filters": [
                     {"property_name": "type", "property_value": "ttc_mother", "type": "property_match"},
                     {"type": "named", "name": "pregnant"},
                 ],
             },
             "configured_indicators": [
                 {"type": "boolean", "column_id": "is_evil", "filter": {"type": "named", "name": "evil"}},
                 {
                     "type": "expression",
                     "column_id": "laugh_sound",
                     "datatype": "string",
                     "expression": {
                         "type": "conditional",
                         "test": {"type": "named", "name": "evil"},
                         "expression_if_true": {"type": "constant", "constant": "mwa-ha-ha"},
                         "expression_if_false": {"type": "constant", "constant": "hehe"},
                     },
                 },
             ],
         }
     )
Example #35
    def test_get_filtered_configs(self):
        table_manager = ConfigurableReportTableManagerMixin(
            MockDataSourceProvider(), filter_missing_domains=True)
        ds1 = get_sample_data_source()
        ds1.domain = 'domain1'
        ds2 = DataSourceConfiguration.wrap(ds1.to_json())
        ds2.domain = 'domain2'

        with patch('corehq.apps.es.es_query.run_query') as run_query:
            run_query.return_value = {
                'hits': {
                    'hits': [{
                        '_id': 'd1',
                        '_source': {
                            'name': 'domain1'
                        }
                    }]
                }
            }
            filtered_configs = table_manager.get_filtered_configs([ds1, ds2])

        self.assertEqual(filtered_configs, [ds1])
Example #36
 def setUp(self):
     folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
     sample_file = os.path.join(folder, 'data_source_with_repeat.json')
     with open(sample_file) as f:
         self.config = DataSourceConfiguration.wrap(json.loads(f.read()))
Example #37
    def setUp(self):
        self.indicator_configuration = DataSourceConfiguration.wrap({
            'display_name': 'Mother Indicators',
            'doc_type': 'DataSourceConfiguration',
            'domain': 'test',
            'referenced_doc_type': 'CommCareCase',
            'table_id': 'mother_indicators',
            'named_expressions': {
                'pregnant': {
                    'type': 'property_name',
                    'property_name': 'pregnant',
                },
                'is_evil': {
                    'type': 'property_name',
                    'property_name': 'is_evil',
                },
                'laugh_sound': {
                    'type': 'conditional',
                    'test': {
                        'type': 'boolean_expression',
                        'expression': {
                            'type': 'property_name',
                            'property_name': 'is_evil',
                        },
                        'operator': 'eq',
                        'property_value': True,
                    },
                    'expression_if_true': "mwa-ha-ha",
                    'expression_if_false': "hehe",
                }
            },
            'named_filters': {},
            'configured_filter': {
                'type': 'boolean_expression',
                'expression': {
                    'type': 'named',
                    'name': 'pregnant'
                },
                'operator': 'eq',
                'property_value': 'yes',
            },
            'configured_indicators': [
                {
                    "type": "expression",
                    "column_id": "laugh_sound",
                    "datatype": "string",
                    "expression": {
                        'type': 'named',
                        'name': 'laugh_sound'
                    }
                },
                {
                    "type": "expression",
                    "column_id": "characterization",
                    "datatype": "string",
                    "expression": {
                        'type': 'conditional',
                        'test': {
                            'type': 'boolean_expression',
                            'expression': {
                                'type': 'named',
                                'name': 'is_evil',
                            },
                            'operator': 'eq',
                            'property_value': True,
                        },
                        'expression_if_true': "evil!",
                        'expression_if_false': "okay",
                    }
                },

            ]
        })
Example #38
 def test_duplicate_columns(self):
     bad_config = DataSourceConfiguration.wrap(self.config.to_json())
     bad_config.configured_indicators.append(
         bad_config.configured_indicators[-1])
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #39
 def setUp(self):
     folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
     sample_file = os.path.join(folder, 'sample_indicator_config.json')
     with open(sample_file) as f:
         structure = json.loads(f.read())
         self.config = DataSourceConfiguration.wrap(structure)
Example #40
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
         'named_expressions': {
             'on_a_date': {
                 'type': 'property_name',
                 'property_name': 'on_date',
             }
         },
         'named_filters': {
             'pregnant': {
                 'type': 'property_match',
                 'property_name': 'mother_state',
                 'property_value': 'pregnant',
             },
             'evil': {
                 'type': 'property_match',
                 'property_name': 'evil',
                 'property_value': 'yes',
             },
             'has_alibi': {
                 'type': 'boolean_expression',
                 'expression': {
                     'type': 'named',
                     'name': 'on_a_date'
                 },
                 'operator': 'eq',
                 'property_value': 'yes',
             }
         },
         'configured_filter': {
             'type': 'and',
             'filters': [{
                 'property_name': 'type',
                 'property_value': 'ttc_mother',
                 'type': 'property_match',
             }, {
                 'type': 'named',
                 'name': 'pregnant',
             }]
         },
         'configured_indicators': [{
             "type": "boolean",
             "column_id": "is_evil",
             "filter": {
                 "type": "named",
                 "name": "evil"
             }
         }, {
             "type": "expression",
             "column_id": "laugh_sound",
             "datatype": "string",
             "expression": {
                 'type': 'conditional',
                 'test': {
                     "type": "named",
                     "name": "evil"
                 },
                 'expression_if_true': {
                     'type': 'constant',
                     'constant': 'mwa-ha-ha',
                 },
                 'expression_if_false': {
                     'type': 'constant',
                     'constant': 'hehe',
                 },
             }
         }]
     })
Example #41
def get_sample_data_source():
    folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
    sample_file = os.path.join(folder, 'sample_data_source.json')
    with open(sample_file) as f:
        structure = json.loads(f.read())
        return DataSourceConfiguration.wrap(structure)
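Several of the tests above (examples #1, #24, #31) avoid mutating this shared sample config by round-tripping it through to_json() and wrap() before changing any fields. The same defensive-copy pattern in isolation:

# Clone the shared template before editing it, as the tests above do,
# so the object returned by get_sample_data_source() stays untouched.
template = get_sample_data_source()
config_copy = DataSourceConfiguration.wrap(template.to_json())
config_copy.engine_id = 'engine-2'  # only the copy is modified
config_copy.validate()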
Example #42
 def test_no_pk_attribute(self):
     bad_config = DataSourceConfiguration.wrap(
         self.indicator_configuration.to_json())
     bad_config.sql_settings.primary_key = ['doc_id', 'laugh_sound']
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #43
 def test_missing_pk_column(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.sql_settings.primary_key = ['doc_id', 'no_exist']
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #44
def get_sample_data_source():
    folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
    sample_file = os.path.join(folder, 'sample_data_source.json')
    with open(sample_file) as f:
        structure = json.loads(f.read())
        return DataSourceConfiguration.wrap(structure)
Example #45
 def test_missing_pk_column(self):
     bad_config = DataSourceConfiguration.wrap(
         self.indicator_configuration.to_json())
     bad_config.sql_settings.primary_key = ['doc_id', 'no_exist']
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #46
 def test_duplicate_columns(self):
     bad_config = DataSourceConfiguration.wrap(self.config.to_json())
     bad_config.configured_indicators.append(bad_config.configured_indicators[-1])
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #47
 def test_no_pk_attribute(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.sql_settings.primary_key = ['doc_id', 'laugh_sound']
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example #48
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
         'named_expressions': {
             'on_a_date': {
                 'type': 'property_name',
                 'property_name': 'on_date',
             }
         },
         'named_filters': {
             'pregnant': {
                 'type': 'property_match',
                 'property_name': 'mother_state',
                 'property_value': 'pregnant',
             },
             'evil': {
                 'type': 'property_match',
                 'property_name': 'evil',
                 'property_value': 'yes',
             },
             'has_alibi': {
                 'type': 'boolean_expression',
                 'expression': {
                     'type': 'named',
                     'name': 'on_a_date'
                 },
                 'operator': 'eq',
                 'property_value': 'yes',
             }
         },
         'configured_filter': {
             'type': 'and',
             'filters': [
                 {
                     'property_name': 'type',
                     'property_value': 'ttc_mother',
                     'type': 'property_match',
                 },
                 {
                     'type': 'named',
                     'name': 'pregnant',
                 }
             ]
         },
         'configured_indicators': [
             {
                 "type": "boolean",
                 "column_id": "is_evil",
                 "filter": {
                     "type": "named",
                     "name": "evil"
                 }
             },
             {
                 "type": "expression",
                 "column_id": "laugh_sound",
                 "datatype": "string",
                 "expression": {
                     'type': 'conditional',
                     'test': {
                         "type": "named",
                         "name": "evil"
                     },
                     'expression_if_true': {
                         'type': 'constant',
                         'constant': 'mwa-ha-ha',
                     },
                     'expression_if_false': {
                         'type': 'constant',
                         'constant': 'hehe',
                     },
                 }
             }
         ]
     })
Example #49
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
         'named_expressions': {
             'pregnant': {
                 'type': 'property_name',
                 'property_name': 'pregnant',
             },
             'is_evil': {
                 'type': 'property_name',
                 'property_name': 'is_evil',
             },
             'laugh_sound': {
                 'type': 'conditional',
                 'test': {
                     'type': 'boolean_expression',
                     'expression': {
                         'type': 'property_name',
                         'property_name': 'is_evil',
                     },
                     'operator': 'eq',
                     'property_value': True,
                 },
                 'expression_if_true': "mwa-ha-ha",
                 'expression_if_false': "hehe",
             }
         },
         'named_filters': {},
         'configured_filter': {
             'type': 'boolean_expression',
             'expression': {
                 'type': 'named',
                 'name': 'pregnant'
             },
             'operator': 'eq',
             'property_value': 'yes',
         },
         'configured_indicators': [
             {
                 "type": "expression",
                 "column_id": "laugh_sound",
                 "datatype": "string",
                 "expression": {
                     'type': 'named',
                     'name': 'laugh_sound'
                 }
             },
             {
                 "type": "expression",
                 "column_id": "characterization",
                 "datatype": "string",
                 "expression": {
                     'type': 'conditional',
                     'test': {
                         'type': 'boolean_expression',
                         'expression': {
                             'type': 'named',
                             'name': 'is_evil',
                         },
                         'operator': 'eq',
                         'property_value': True,
                     },
                     'expression_if_true': "evil!",
                     'expression_if_false': "okay",
                 }
             },
         ]
     })
Example #50
def get_data_source_with_related_doc_type():
    folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
    sample_file = os.path.join(folder, 'parent_child_data_source.json')
    with open(sample_file, encoding='utf-8') as f:
        structure = json.loads(f.read())
        return DataSourceConfiguration.wrap(structure)
Example #51
 def setUp(self):
     folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
     sample_file = os.path.join(folder, 'data_source_with_repeat.json')
     with open(sample_file) as f:
         self.config = DataSourceConfiguration.wrap(json.loads(f.read()))