Example No. 1
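 # Imports an aggregation spec and verifies the resulting AggregateTableDefinition:
 # primary data source, time aggregation columns, and secondary table join columns.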
 def test_import_from_basic_definition(self):
     spec = self.get_monthly_config_spec()
     data_source = DataSourceConfiguration(
         domain=spec.domain,
         referenced_doc_type='CommCareCase',
         table_id='some_table',
     )
     data_source.save()
     self.addCleanup(data_source.delete)
     # these just have to be valid data source objects
     spec.primary_table.data_source_id = data_source._id
     spec.secondary_tables[0].data_source_id = data_source._id
     aggregate_table_definition = import_aggregation_models_from_spec(spec)
     self.assertEqual(1, AggregateTableDefinition.objects.count())
     table_def = AggregateTableDefinition.objects.get(pk=aggregate_table_definition.pk)
     self.assertEqual(data_source._id, table_def.primary_data_source_id.hex)
     self.assertEqual(4, table_def.primary_columns.count())
     aggregation = table_def.time_aggregation
     self.assertEqual('month', aggregation.aggregation_unit)
     self.assertEqual('opened_date', aggregation.start_column)
     self.assertEqual('closed_date', aggregation.end_column)
     self.assertEqual(1, table_def.secondary_tables.count())
     secondary_table = table_def.secondary_tables.get()
     self.assertEqual(data_source._id, secondary_table.data_source_id.hex)
     self.assertEqual('doc_id', secondary_table.join_column_primary)
     self.assertEqual('form.case.@case_id', secondary_table.join_column_secondary)
     self.assertEqual('received_on', secondary_table.time_window_column)
     self.assertEqual(2, secondary_table.columns.count())
     self.assertEqual(
         set(['fu_forms_in_month', 'any_fu_forms_in_month']),
         set(secondary_table.columns.values_list('column_id', flat=True))
     )
Example No. 2
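    # Patches the connection manager so each engine_id resolves to its own database URL, then
    # saves two data sources on different engines and builds an IndicatorSqlAdapter for each.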
    def setUpClass(cls):
        cls.db2_name = 'cchq_ucr_tests'
        db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        # setup patches
        cls.connection_string_patch = patch('corehq.sql_db.connections.connection_manager.get_connection_string')

        def connection_string_for_engine(engine_id):
            if engine_id == 'engine-1':
                return settings.SQL_REPORTING_DATABASE_URL
            else:
                return cls.db2_url

        mock_manager = cls.connection_string_patch.start()
        mock_manager.side_effect = connection_string_for_engine

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        cls.db_context = temporary_database(cls.db2_name)
        cls.db_context.__enter__()

        cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
        cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
Example No. 3
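    # Updates a report builder report, reusing or rebuilding its data source as needed and
    # enforcing the five-data-sources-per-domain limit.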
    def update_report(self):
        from corehq.apps.userreports.views import delete_data_source_shared

        matching_data_source = self.ds_builder.get_existing_match()
        if matching_data_source:
            reactivated = False
            if matching_data_source._id != self.existing_report.config_id:

                # If no one else is using the current data source, deactivate it.
                data_source = DataSourceConfiguration.get(self.existing_report.config_id)
                if data_source.get_report_count() <= 1:
                    data_source.deactivate()

                self.existing_report.config_id = matching_data_source._id
            elif matching_data_source.is_deactivated:
                existing_sources = DataSourceConfiguration.by_domain(self.domain)
                active_sources = [config for config in existing_sources if not config.is_deactivated]
                if len(active_sources) >= 5:
                    raise forms.ValidationError(_(
                        "Editing this report would require a new data source. The limit is 5. "
                        "To continue, first delete all of the reports using a particular "
                        "data source (or the data source itself) and try again. "
                    ))
                matching_data_source.is_deactivated = False
                reactivated = True
            changed = False
            indicators = self.ds_builder.indicators(self._number_columns)
            if matching_data_source.configured_indicators != indicators:
                matching_data_source.configured_indicators = indicators
                changed = True
            if changed or reactivated:
                matching_data_source.save()
                tasks.rebuild_indicators.delay(matching_data_source._id)
        else:
            # We need to create a new data source
            existing_sources = DataSourceConfiguration.by_domain(self.domain)
            active_sources = [config for config in existing_sources if not config.is_deactivated]

            # Deactivate the old data source if no other reports use it
            old_data_source = DataSourceConfiguration.get(self.existing_report.config_id)
            if old_data_source.get_report_count() <= 1:
                old_data_source.deactivate()

            # Make sure the user can create more data sources
            elif len(active_sources) >= 5:
                raise forms.ValidationError(_(
                    "Editing this report would require a new data source. The limit is 5. "
                    "To continue, first delete all of the reports using a particular "
                    "data source (or the data source itself) and try again. "
                ))
            data_source_config_id = self._build_data_source()
            self.existing_report.config_id = data_source_config_id

        self.existing_report.aggregation_columns = self._report_aggregation_cols
        self.existing_report.columns = self._report_columns
        self.existing_report.filters = self._report_filters
        self.existing_report.configured_charts = self._report_charts
        self.existing_report.validate()
        self.existing_report.save()
        return self.existing_report
Example No. 4
 def setUpClass(cls):
     data_source = DataSourceConfiguration(
         domain=cls.DOMAIN,
         table_id="foo",
         referenced_doc_type="CommCareCase",
     )
     data_source.save()
     ReportConfiguration(
         domain=cls.DOMAIN,
         config_id=data_source._id,
         columns=[
             {
                 "type": "field",
                 "field": "foo",
                 "column_id": "foo",
                 "aggregation": "simple",
                 "display": "My Column",
             },
             {
                 "type": "field",
                 "field": "bar",
                 "column_id": "bar",
                 "aggregation": "simple",
                 "display": {"en": "Name", "fra": "Nom"},
             },
         ]
     ).save()
Example No. 5
    def _create_data_source(cls):
        cls.data_sources = {}
        cls.adapters = {}

        for backend_id in UCR_BACKENDS:
            config = DataSourceConfiguration(
                backend_id=backend_id,
                domain=cls.domain,
                display_name=cls.domain,
                referenced_doc_type='CommCareCase',
                table_id="foo",
                configured_filter={
                    "type": "boolean_expression",
                    "operator": "eq",
                    "expression": {
                        "type": "property_name",
                        "property_name": "type"
                    },
                    "property_value": cls.case_type,
                },
                configured_indicators=[
                    {
                        "type": "expression",
                        "expression": {
                            "type": "property_name",
                            "property_name": 'state'
                        },
                        "column_id": 'indicator_col_id_state',
                        "display_name": 'indicator_display_name_state',
                        "datatype": "string"
                    },
                    {
                        "type": "expression",
                        "expression": {
                            "type": "property_name",
                            "property_name": 'city'
                        },
                        "column_id": 'indicator_col_id_city',
                        "display_name": 'indicator_display_name_city',
                        "datatype": "string"
                    },
                    {
                        "type": "expression",
                        "expression": {
                            "type": "property_name",
                            "property_name": 'number'
                        },
                        "column_id": 'indicator_col_id_number',
                        "datatype": "integer"
                    },
                ],
            )
            config.validate()
            config.save()
            rebuild_indicators(config._id)
            adapter = get_indicator_adapter(config)
            adapter.refresh_table()
            cls.data_sources[backend_id] = config
            cls.adapters[backend_id] = adapter
Example No. 6
    def test_get_by_domain(self):
        results = DataSourceConfiguration.by_domain('foo')
        self.assertEqual(2, len(results))
        for item in results:
            self.assertTrue(item.table_id in ('foo1', 'foo2'))

        results = DataSourceConfiguration.by_domain('not-foo')
        self.assertEqual(0, len(results))
Example No. 7
def set_default_engine_ids(apps, schema_editor):
    if not settings.UNIT_TESTING:
        get_preindex_plugin('userreports').sync_design_docs()
        ucr_db = DataSourceConfiguration.get_db()
        with IterDB(ucr_db) as iter_db:
            for doc in iter_docs(ucr_db, DataSourceConfiguration.all_ids()):
                if not doc.get('engine_id'):
                    doc['engine_id'] = DEFAULT_ENGINE_ID
                    iter_db.save(doc)
Example No. 8
def set_default_engine_ids(apps, schema_editor):
    if not settings.UNIT_TESTING:
        sync_docs.sync(ucr_models, verbosity=2)
        ucr_db = DataSourceConfiguration.get_db()
        with IterDB(ucr_db) as iter_db:
            for doc in iter_docs(ucr_db, DataSourceConfiguration.all_ids()):
                if not doc.get("engine_id"):
                    doc["engine_id"] = DEFAULT_ENGINE_ID
                    iter_db.save(doc)
Example No. 9
    def _build_report(self, vals, field="my_field", build_data_source=True):
        """
        Build a new report, and populate it with cases.

        Return a ConfigurableReportDataSource and a FieldColumn
        :param vals: List of values to populate the given report field with.
        :param field: The name of a field in the data source/report
        :return: Tuple containing a ConfigurableReportDataSource and FieldColumn.
        The column is a column mapped to the given field.
        """

        # Create Cases
        for v in vals:
            self._new_case({field: v}).save()

        # Create report
        data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name="foo",
            referenced_doc_type="CommCareCase",
            table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {"type": "property_name", "property_name": "type"},
                "property_value": self.case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {"type": "property_name", "property_name": field},
                    "column_id": field,
                    "display_name": field,
                    "datatype": "string",
                }
            ],
        )
        data_source_config.validate()
        data_source_config.save()
        if build_data_source:
            tasks.rebuild_indicators(data_source_config._id)

        report_config = ReportConfiguration(
            domain=self.domain,
            config_id=data_source_config._id,
            title="foo",
            aggregation_columns=["doc_id"],
            columns=[{"type": "expanded", "field": field, "display": field, "format": "default"}],
            filters=[],
            configured_charts=[],
        )
        report_config.save()
        data_source = ReportFactory.from_spec(report_config)

        return data_source, data_source.column_configs[0]
Example No. 10
    def _create_data_source(cls):
        cls.data_sources = {}
        cls.adapters = {}

        config = DataSourceConfiguration(
            domain=cls.domain,
            display_name=cls.domain,
            referenced_doc_type='CommCareCase',
            table_id="foo",
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {
                    "type": "property_name",
                    "property_name": "type"
                },
                "property_value": cls.case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'my_date'
                    },
                    "column_id": 'date_as_string',
                    "display_name": 'date_as_string',
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'my_date'
                    },
                    "column_id": 'date_as_date',
                    "datatype": "date"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": "my_datetime",
                    },
                    "column_id": "datetime_as_datetime",
                    "datatype": "datetime"
                }
            ],
        )
        config.validate()
        config.save()
        rebuild_indicators(config._id)
        adapter = get_indicator_adapter(config)
        cls.data_sources[UCR_SQL_BACKEND] = config
        cls.adapters[UCR_SQL_BACKEND] = adapter
Example No. 11
    def _create_data_source(cls):
        cls.data_sources = {}
        cls.adapters = {}

        # this is a hack to have both sql and es backends created in a class
        # method. alternative would be to have these created on each test run
        for backend_id in UCR_BACKENDS:
            config = DataSourceConfiguration(
                backend_id=backend_id,
                domain=cls.domain,
                display_name=cls.domain,
                referenced_doc_type='CommCareCase',
                table_id="foo",
                configured_filter={
                    "type": "boolean_expression",
                    "operator": "eq",
                    "expression": {
                        "type": "property_name",
                        "property_name": "type"
                    },
                    "property_value": cls.case_type,
                },
                configured_indicators=[
                    {
                        "type": "expression",
                        "expression": {
                            "type": "property_name",
                            "property_name": 'first_name'
                        },
                        "column_id": 'indicator_col_id_first_name',
                        "display_name": 'indicator_display_name_first_name',
                        "datatype": "string"
                    },
                    {
                        "type": "expression",
                        "expression": {
                            "type": "property_name",
                            "property_name": 'number'
                        },
                        "column_id": 'indicator_col_id_number',
                        "datatype": "integer"
                    },
                ],
            )
            config.validate()
            config.save()
            rebuild_indicators(config._id)
            adapter = get_indicator_adapter(config)
            adapter.refresh_table()
            cls.data_sources[backend_id] = config
            cls.adapters[backend_id] = adapter
Example No. 12
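    # Patches get_engine_id and the connection string lookup so the two data sources map to
    # separate databases, creating the second database directly through SQLAlchemy.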
    def setUpClass(cls):
        cls.db2_name = 'cchq_ucr_tests'
        db_conn_parts = settings.SQL_REPORTING_DATABASE_URL.split('/')
        db_conn_parts[-1] = cls.db2_name
        cls.db2_url = '/'.join(db_conn_parts)

        # setup patches
        cls.engine_id_patches = (
            # unfortunately we need to patch this directly in modules that import it as well
            patch('corehq.apps.userreports.sql.connection.get_engine_id'),
            patch('corehq.apps.userreports.sql.adapter.get_engine_id'),
            patch('corehq.apps.userreports.reports.data_source.get_engine_id'),
        )
        cls.connection_string_patch = patch('corehq.db.connection_manager.get_connection_string')
        for engine_id_patch in cls.engine_id_patches:
            mock_engine_id_method = engine_id_patch.start()
            mock_engine_id_method.side_effect = lambda x: x.engine_id

        def connection_string_for_engine(engine_id):
            if engine_id == 'engine-1':
                return settings.SQL_REPORTING_DATABASE_URL
            else:
                return cls.db2_url

        mock_manager = cls.connection_string_patch.start()
        mock_manager.side_effect = connection_string_for_engine

        # setup data sources
        data_source_template = get_sample_data_source()
        cls.ds_1 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_1.engine_id = 'engine-1'
        cls.ds_1.save()
        cls.ds_2 = DataSourceConfiguration.wrap(data_source_template.to_json())
        cls.ds_2.engine_id = 'engine-2'
        cls.ds_2.save()

        # use db1 engine to create db2 http://stackoverflow.com/a/8977109/8207
        cls.root_engine = create_engine(settings.SQL_REPORTING_DATABASE_URL)
        conn = cls.root_engine.connect()
        conn.execute('commit')
        try:
            conn.execute('CREATE DATABASE {}'.format(cls.db2_name))
        except ProgrammingError:
            # optimistically assume it failed because the database was already created.
            pass
        conn.close()

        cls.ds1_adapter = IndicatorSqlAdapter(cls.ds_1)
        cls.ds2_adapter = IndicatorSqlAdapter(cls.ds_2)
Example No. 13
 def __init__(self, domain, *args, **kwargs):
     self.domain = domain
     available_data_sources = DataSourceConfiguration.by_domain(self.domain)
     super(ReportDataSourceField, self).__init__(
         choices=[(src._id, src.display_name) for src in available_data_sources],
         *args, **kwargs
     )
Example No. 14
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
         'named_filters': {
             'pregnant': {
                 'type': 'property_match',
                 'property_name': 'mother_state',
                 'property_value': 'pregnant',
             }
         },
         'configured_filter': {
             'type': 'and',
             'filters': [
                 {
                     'property_name': 'type',
                     'property_value': 'ttc_mother',
                     'type': 'property_match',
                 },
                 {
                     'type': 'named',
                     'name': 'pregnant',
                 }
             ]
         }
     })
Example No. 15
    def setUp(self):
        self.domain_obj = create_domain(self.domain)
        es = get_es_new()
        initialize_index_and_mapping(es, USER_INDEX_INFO)
        self.region = LocationType.objects.create(domain=self.domain, name="region")
        self.town = LocationType.objects.create(domain=self.domain, name="town", parent_type=self.region)

        self.data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name='Locations in Westworld',
            referenced_doc_type='Location',
            table_id=clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter={},
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": "name"
                },
                "column_id": "location_name",
                "display_name": "location_name",
                "datatype": "string"
            }],
        )
        self.data_source_config.validate()
        self.data_source_config.save()

        self.pillow = get_location_pillow(ucr_configs=[self.data_source_config])
        self.pillow.get_change_feed().get_latest_offsets()
Example No. 16
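    # Diffs each engine's tables against the UCR metadata and prints the row count and latest
    # inserted_at for tables that no longer have a backing data source.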
    def handle(self, **options):
        data_sources = list(DataSourceConfiguration.all())
        data_sources.extend(list(StaticDataSourceConfiguration.all()))

        engine_ids = self._get_engine_ids(data_sources, options.get('engine_id'))

        tables_to_remove_by_engine = defaultdict(list)
        for engine_id in engine_ids:
            engine = connection_manager.get_engine(engine_id)
            with engine.begin() as connection:
                migration_context = get_migration_context(connection, include_object=_include_object)
                raw_diffs = compare_metadata(migration_context, metadata)

            diffs = reformat_alembic_diffs(raw_diffs)
            tables_to_remove_by_engine[engine_id] = [
                diff.table_name for diff in diffs
                if diff.type == 'remove_table'
            ]

        for engine_id, tablenames in tables_to_remove_by_engine.items():
            engine = connection_manager.get_engine(engine_id)
            for tablename in tablenames:
                with engine.begin() as connection:
                    try:
                        result = connection.execute(
                            'SELECT COUNT(*), MAX(inserted_at) FROM "{tablename}"'.format(tablename=tablename)
                        )
                    except Exception:
                        print(tablename, "no inserted_at column, probably not UCR")
                    else:
                        print(tablename, result.fetchone())
Example No. 17
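# Rebuilds a data source's table from scratch and repopulates it from the relevant Couch documents.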
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(CustomDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = CustomDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type,
                               database=couchdb)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static:
        config.meta.build.finished = True
        config.save()
Example No. 18
def _shared_context(domain):
    custom_data_sources = list(CustomDataSourceConfiguration.by_domain(domain))
    return {
        'domain': domain,
        'reports': ReportConfiguration.by_domain(domain),
        'data_sources': DataSourceConfiguration.by_domain(domain) + custom_data_sources,
    }
Example No. 19
    def bootstrap(self, configs=None):
        # default to all data source configurations when none are provided
        if configs is None:
            configs = DataSourceConfiguration.all()

        self.tables = [IndicatorSqlAdapter(self.get_sql_engine(), config) for config in configs]
        self.bootstrapped = True
Example No. 20
def get_datasources_for_domain(domain, referenced_doc_type=None, include_static=False, include_aggregate=False):
    from corehq.apps.userreports.models import DataSourceConfiguration, StaticDataSourceConfiguration
    key = [domain]
    if referenced_doc_type:
        key.append(referenced_doc_type)
    datasources = sorted(
        DataSourceConfiguration.view(
            'userreports/data_sources_by_build_info',
            startkey=key,
            endkey=key + [{}],
            reduce=False,
            include_docs=True
        ),
        key=lambda config: config.display_name or '')

    if include_static:
        static_ds = StaticDataSourceConfiguration.by_domain(domain)
        if referenced_doc_type:
            static_ds = [ds for ds in static_ds if ds.referenced_doc_type == referenced_doc_type]
        datasources.extend(sorted(static_ds, key=lambda config: config.display_name))

    if include_aggregate:
        from corehq.apps.aggregate_ucrs.models import AggregateTableDefinition
        datasources.extend(AggregateTableDefinition.objects.filter(domain=domain).all())
    return datasources
Example No. 21
    def setUp(self):
        delete_all_locations()
        self.domain_obj = create_domain(self.domain)

        self.region = LocationType.objects.create(domain=self.domain, name="region")
        self.town = LocationType.objects.create(domain=self.domain, name="town", parent_type=self.region)

        self.data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name='Locations in Westworld',
            referenced_doc_type='Location',
            table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter={},
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": "name"
                },
                "column_id": "location_name",
                "display_name": "location_name",
                "datatype": "string"
            }],
        )
        self.data_source_config.validate()
        self.data_source_config.save()

        self.pillow = get_kafka_ucr_pillow()
        self.pillow.bootstrap(configs=[self.data_source_config])
        with trap_extra_setup(KafkaUnavailableError):
            self.pillow.get_change_feed().get_current_offsets()
Example No. 22
 def test_stale_rebuild(self):
     # fetch and re-save the config so self.config's revision becomes stale
     later_config = DataSourceConfiguration.get(self.config._id)
     later_config.save()
     self.assertNotEqual(self.config._rev, later_config._rev)
     with self.assertRaises(StaleRebuildError):
         self.pillow.processors[0].rebuild_table(get_indicator_adapter(self.config))
Example No. 23
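# Builds indicator rows for a batch of document ids, tracking progress in Redis and marking
# the build finished unless a newer build has started in the meantime.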
def _build_indicators(indicator_config_id, relevant_ids):
    config = _get_config_by_id(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
            redis_client.srem(redis_key, doc.get('_id'))
        except Exception as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static(indicator_config_id):
        redis_client.delete(redis_key)
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Example No. 24
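    # Rejects a new report builder report once a domain already has five data sources and no
    # existing data source matches the requested one.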
    def clean(self):
        """
        Raise a validation error if there are already 5 data sources and this
        report won't be able to use one of the existing ones.
        """
        cleaned_data = super(DataSourceForm, self).clean()
        source_type = cleaned_data.get('source_type')
        report_source = cleaned_data.get('report_source')
        app_id = cleaned_data.get('application')

        if report_source and source_type and app_id:

            app = Application.get(app_id)
            ds_builder = DataSourceBuilder(self.domain, app, source_type, report_source)

            existing_sources = DataSourceConfiguration.by_domain(self.domain)
            if len(existing_sources) >= 5:
                if not ds_builder.get_existing_match():
                    raise forms.ValidationError(_(
                        "Too many data sources!\n"
                        "Creating this report would cause you to go over the maximum "
                        "number of data sources allowed in this domain. The current "
                        "limit is 5. "
                        "To continue, delete all of the reports using a particular "
                        "data source (or the data source itself) and try again. "
                    ))

        return cleaned_data
Example No. 25
 def tearDownClass(cls):
     for config in DataSourceConfiguration.all():
         config.delete()
     delete_all_report_configs()
     for domain in Domain.get_all():
         domain.delete()
     super(ReportConfigurationDbTest, cls).tearDownClass()
Example No. 26
 def test_last_modified_date(self):
     start = datetime.datetime.utcnow()
     time.sleep(.01)
     data_source = DataSourceConfiguration(
         domain='mod-test', table_id='mod-test', referenced_doc_type='mod-test'
     )
     data_source.save()
     self.assertTrue(start < data_source.last_modified)
     time.sleep(.01)
     between = datetime.datetime.utcnow()
     self.assertTrue(between > data_source.last_modified)
     time.sleep(.01)
     data_source.save()
     time.sleep(.01)
     self.assertTrue(between < data_source.last_modified)
     self.assertTrue(datetime.datetime.utcnow() > data_source.last_modified)
Example No. 27
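# Iterates over relevant document ids in chunks, building indicators and recording resume
# state so an interrupted rebuild can pick up where it left off.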
def _iteratively_build_table(config, last_id=None, resume_helper=None):
    resume_helper = resume_helper or DataSourceResumeHelper(config)
    indicator_config_id = config._id

    relevant_ids = []
    document_store = get_document_store(config.domain, config.referenced_doc_type)
    for relevant_id in document_store.iter_document_ids(last_id):
        relevant_ids.append(relevant_id)
        if len(relevant_ids) >= ID_CHUNK_SIZE:
            resume_helper.set_ids_to_resume_from(relevant_ids)
            _build_indicators(config, document_store, relevant_ids, resume_helper)
            relevant_ids = []

    if relevant_ids:
        resume_helper.set_ids_to_resume_from(relevant_ids)
        _build_indicators(config, document_store, relevant_ids, resume_helper)

    if not id_is_static(indicator_config_id):
        resume_helper.clear_ids()
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Example No. 28
def _get_all_data_sources():
    from corehq.apps.userreports.models import DataSourceConfiguration
    return DataSourceConfiguration.view(
        'userreports/data_sources_by_build_info',
        reduce=False,
        include_docs=True
    )
Example No. 29
 def test_missing_no_named_in_named(self):
     bad_config = DataSourceConfiguration.wrap(self.indicator_configuration.to_json())
     bad_config.named_expressions['broken'] = {
         "type": "named",
         "name": "pregnant",
     }
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example No. 30
def _shared_context(domain):
    static_reports = list(StaticReportConfiguration.by_domain(domain))
    static_data_sources = list(StaticDataSourceConfiguration.by_domain(domain))
    return {
        'domain': domain,
        'reports': ReportConfiguration.by_domain(domain) + static_reports,
        'data_sources': DataSourceConfiguration.by_domain(domain) + static_data_sources,
    }
Example No. 31
 def test_domain_is_required(self):
     with self.assertRaises(BadValueError):
         DataSourceConfiguration(table_id='table',
                                 referenced_doc_type='doc').save()
Example No. 32
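    # Creates cases, a case-backed data source, and a report configuration used by the
    # configurable report data API tests.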
    def setUpClass(cls):
        super(TestConfigurableReportDataResource, cls).setUpClass()

        case_type = "my_case_type"
        cls.field_name = "my_field"
        cls.case_property_values = ["foo", "foo", "bar", "baz"]

        cls.cases = []
        for val in cls.case_property_values:
            id = uuid.uuid4().hex
            case_block = CaseBlock.deprecated_init(
                create=True,
                case_id=id,
                case_type=case_type,
                update={
                    cls.field_name: val
                },
            ).as_xml()
            post_case_blocks([case_block], {'domain': cls.domain.name})
            cls.cases.append(CommCareCase.get(id))

        cls.report_columns = [{
            "column_id": cls.field_name,
            "type": "field",
            "field": cls.field_name,
            "aggregation": "simple",
        }]
        cls.report_filters = [{
            'datatype': 'string',
            'field': cls.field_name,
            'type': 'dynamic_choice_list',
            'slug': 'my_field_filter',
        }]

        cls.data_source = DataSourceConfiguration(
            domain=cls.domain.name,
            referenced_doc_type="CommCareCase",
            table_id=uuid.uuid4().hex,
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {
                    "type": "property_name",
                    "property_name": "type"
                },
                "property_value": case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": cls.field_name
                    },
                    "column_id": cls.field_name,
                    "display_name": cls.field_name,
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": "opened_by"
                    },
                    "column_id": "opened_by",
                    "display_name": "opened_by",
                    "datatype": "string"
                },
            ],
        )
        cls.data_source.validate()
        cls.data_source.save()
        rebuild_indicators(cls.data_source._id)

        cls.report_configuration = ReportConfiguration(
            domain=cls.domain.name,
            config_id=cls.data_source._id,
            aggregation_columns=["doc_id"],
            columns=cls.report_columns,
            filters=cls.report_filters,
        )
        cls.report_configuration.save()
Example No. 33
    def tearDown(self):
        delete_all_report_configs()
        for config in DataSourceConfiguration.all():
            config.delete()

        super().tearDown()
Example No. 34
    def _build_report(self, vals, field='my_field', build_data_source=True):
        """
        Build a new report, and populate it with cases.

        Return a ConfigurableReportDataSource and a FieldColumn
        :param vals: List of values to populate the given report field with.
        :param field: The name of a field in the data source/report
        :return: Tuple containing a ConfigurableReportDataSource and FieldColumn.
        The column is a column mapped to the given field.
        """

        # Create Cases
        for v in vals:
            update_props = {field: v} if v is not None else {}
            self._new_case(update_props).save()

        # Create report
        data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name='foo',
            referenced_doc_type='CommCareCase',
            table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {
                    "type": "property_name",
                    "property_name": "type"
                },
                "property_value": self.case_type,
            },
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": field
                },
                "column_id": field,
                "display_name": field,
                "datatype": "string"
            }],
        )
        data_source_config.validate()
        data_source_config.save()
        self.addCleanup(data_source_config.delete)
        if build_data_source:
            tasks.rebuild_indicators(data_source_config._id)
            adapter = get_indicator_adapter(data_source_config)
            adapter.refresh_table()

        report_config = ReportConfiguration(domain=self.domain,
                                            config_id=data_source_config._id,
                                            title='foo',
                                            aggregation_columns=['doc_id'],
                                            columns=[{
                                                "type": "expanded",
                                                "field": field,
                                                "display": field,
                                                "format": "default",
                                            }],
                                            filters=[],
                                            configured_charts=[])
        report_config.save()
        self.addCleanup(report_config.delete)
        data_source = ReportFactory.from_spec(report_config)
        adapter = get_indicator_adapter(data_source_config)
        if build_data_source:
            adapter.refresh_table()

        return data_source, data_source.top_level_columns[0]
Example No. 35
def create_data_source(request, domain):
    return _edit_data_source_shared(request, domain,
                                    DataSourceConfiguration(domain=domain))
Example No. 36
def _get_all_data_sources():
    from corehq.apps.userreports.models import DataSourceConfiguration
    return DataSourceConfiguration.view(
        'userreports/data_sources_by_build_info',
        reduce=False,
        include_docs=True)
Example No. 37
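 # Data source whose filter and indicators reference named expressions
 # ('pregnant', 'is_evil', 'laugh_sound').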
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
         'named_expressions': {
             'pregnant': {
                 'type': 'property_name',
                 'property_name': 'pregnant',
             },
             'is_evil': {
                 'type': 'property_name',
                 'property_name': 'is_evil',
             },
             'laugh_sound': {
                 'type': 'conditional',
                 'test': {
                     'type': 'boolean_expression',
                     'expression': {
                         'type': 'property_name',
                         'property_name': 'is_evil',
                     },
                     'operator': 'eq',
                     'property_value': True,
                 },
                 'expression_if_true': "mwa-ha-ha",
                 'expression_if_false': "hehe",
             }
         },
         'named_filters': {},
         'configured_filter': {
             'type': 'boolean_expression',
             'expression': {
                 'type': 'named',
                 'name': 'pregnant'
             },
             'operator': 'eq',
             'property_value': 'yes',
         },
         'configured_indicators': [
             {
                 "type": "expression",
                 "column_id": "laugh_sound",
                 "datatype": "string",
                 "expression": {
                     'type': 'named',
                     'name': 'laugh_sound'
                 }
             },
             {
                 "type": "expression",
                 "column_id": "characterization",
                 "datatype": "string",
                 "expression": {
                     'type': 'conditional',
                     'test': {
                         'type': 'boolean_expression',
                         'expression': {
                             'type': 'named',
                             'name': 'is_evil',
                         },
                         'operator': 'eq',
                         'property_value': True,
                     },
                     'expression_if_true': "evil!",
                     'expression_if_false': "okay",
                 }
             },
         ]
     })
Example No. 38
 def test_duplicate_columns(self):
     bad_config = DataSourceConfiguration.wrap(self.config.to_json())
     bad_config.configured_indicators.append(
         bad_config.configured_indicators[-1])
     with self.assertRaises(BadSpecError):
         bad_config.validate()
Example No. 39
 def tearDownClass(cls):
     for config in DataSourceConfiguration.all():
         config.delete()
Example No. 40
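# End-to-end test of a location-backed data source: builds the table, then checks that the
# UCR pillow picks up new and edited locations (deletions are not yet processed).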
class TestLocationDataSource(TestCase):
    domain = "delos_corp"

    def setUp(self):
        delete_all_locations()
        self.domain_obj = create_domain(self.domain)

        self.region = LocationType.objects.create(domain=self.domain,
                                                  name="region")
        self.town = LocationType.objects.create(domain=self.domain,
                                                name="town",
                                                parent_type=self.region)

        self.data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name='Locations in Westworld',
            referenced_doc_type='Location',
            table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter={},
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": "name"
                },
                "column_id": "location_name",
                "display_name": "location_name",
                "datatype": "string"
            }],
        )
        self.data_source_config.validate()
        self.data_source_config.save()

        self.pillow = get_kafka_ucr_pillow()
        self.pillow.bootstrap(configs=[self.data_source_config])
        with trap_extra_setup(KafkaUnavailableError):
            self.pillow.get_change_feed().get_current_offsets()

    def tearDown(self):
        self.domain_obj.delete()
        delete_all_locations()
        self.data_source_config.delete()

    def _make_loc(self, name, location_type):
        return SQLLocation.objects.create(domain=self.domain,
                                          name=name,
                                          site_code=name,
                                          location_type=location_type)

    def assertDataSourceAccurate(self, expected_locations):
        adapter = get_indicator_adapter(self.data_source_config)
        query = adapter.get_query_object()
        adapter.refresh_table()
        data_source = query.all()
        self.assertCountEqual(expected_locations,
                              [row[-1] for row in data_source])

    def test_location_data_source(self):
        self._make_loc("Westworld", self.region)
        sweetwater = self._make_loc("Sweetwater", self.town)
        las_mudas = self._make_loc("Las Mudas", self.town)

        rebuild_indicators(self.data_source_config._id)

        self.assertDataSourceAccurate(["Westworld", "Sweetwater", "Las Mudas"])

        # Insert new location
        since = self.pillow.get_change_feed().get_current_offsets()
        self._make_loc("Blood Arroyo", self.town)

        # Change an existing location
        sweetwater.name = "Pariah"
        sweetwater.save()

        # Process both changes together and verify that they went through
        self.pillow.process_changes(since=since, forever=False)
        self.assertDataSourceAccurate(
            ["Westworld", "Pariah", "Las Mudas", "Blood Arroyo"])

        # Delete a location
        since = self.pillow.get_change_feed().get_current_offsets()
        las_mudas.delete()
        self.pillow.process_changes(since=since, forever=False)
        # No actual change - deletions are not yet processed
        self.assertDataSourceAccurate(
            ["Westworld", "Pariah", "Las Mudas", "Blood Arroyo"])
Example No. 41
 def test_table_id_is_required(self):
     with self.assertRaises(BadValueError):
         DataSourceConfiguration(
             domain='domain', referenced_doc_type='XFormInstance').save()
Example No. 42
 def test_doc_type_is_required(self):
     with self.assertRaises(BadValueError):
         DataSourceConfiguration(domain='domain', table_id='table').save()
Example No. 43
def _get_config_by_id(indicator_config_id):
    if id_is_static(indicator_config_id):
        return StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        return DataSourceConfiguration.get(indicator_config_id)
Example No. 44
def get_data_source_with_related_doc_type():
    folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
    sample_file = os.path.join(folder, 'parent_child_data_source.json')
    with open(sample_file, encoding='utf-8') as f:
        structure = json.loads(f.read())
        return DataSourceConfiguration.wrap(structure)
Example No. 45
 def config(self):
     if self.config_id is None:
         return DataSourceConfiguration(domain=self.domain)
     return get_datasource_config_or_404(self.config_id, self.domain)[0]
Example No. 46
 def tearDownClass(cls):
     cls.app.delete()
     for config in DataSourceConfiguration.all():
         config.delete()
     delete_all_report_configs()
     super(ReportBuilderDBTest, cls).tearDownClass()
Example No. 47
def get_sample_data_source():
    folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
    sample_file = os.path.join(folder, 'sample_data_source.json')
    with open(sample_file) as f:
        structure = json.loads(f.read())
        return DataSourceConfiguration.wrap(structure)
Example No. 48
    def test_updating_report_data_source(self):
        """
        Test that changing the app or number column for a report results in an update to the data source next time
        the report is saved.
        """

        # Make report
        builder_form = ConfigureTableReportForm(
            self.domain,
            "Test Report",
            self.app._id,
            "case",
            "some_case_type",
            existing_report=None,
            data={
                'group_by': ['closed'],
                'chart': 'bar',
                'user_filters': '[]',
                'default_filters': '[]',
                'columns': '[{"property": "closed", "display_text": "closed", "calculation": "Count per Choice"}]',
            })
        self.assertTrue(builder_form.is_valid())
        report = builder_form.create_report()

        self.assertEqual(report.config.configured_indicators[0]['datatype'],
                         "string")

        # Make an edit to the first report builder report
        builder_form = ConfigureTableReportForm(
            self.domain,
            "Test Report",
            self.app._id,
            "case",
            "some_case_type",
            existing_report=report,
            data={
                'group_by': ['user_id'],
                'chart': 'bar',
                'user_filters': '[]',
                'default_filters': '[]',
                # A "Sum" calculation on the closed case property doesn't make much sense, but it gives us
                # a numeric calculation without having to create real case properties for this case type.
                'columns': '[{"property": "closed", "display_text": "closed", "calculation": "Sum"}]',
            })
        self.assertTrue(builder_form.is_valid())
        builder_form.update_report()

        # reload report data source, because report.config is memoized
        data_source = DataSourceConfiguration.get(report.config._id)
        # The closed property indicator should now be decimal type because the user indicated that it was numeric
        # by giving the column the "Sum" aggregation.
        self.assertEqual(data_source.configured_indicators[0]['datatype'],
                         "decimal")
Example No. 49
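# Sets up a case-backed data source and report, feeds rows through the Kafka UCR pillow, and
# exercises basic querying plus the limit and skip options of the report data source.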
class ReportDataTest(TestCase):
    dependent_apps = ['pillowtop']

    def setUp(self):
        super(ReportDataTest, self).setUp()
        # Create report
        self.domain = 'test-ucr-report-data'
        self.data_source = DataSourceConfiguration(
            domain=self.domain,
            referenced_doc_type='CommCareCase',
            table_id=uuid.uuid4().hex,
            configured_filter={},
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'name'
                },
                "column_id": 'name',
                "display_name": 'name',
                "datatype": "string"
            }, {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'number'
                },
                "column_id": 'number',
                "display_name": 'number',
                "datatype": "integer"
            }],
        )
        self.data_source.validate()
        self.data_source.save()
        IndicatorSqlAdapter(self.data_source).rebuild_table()
        self.addCleanup(self.data_source.delete)

        # initialize a report on the data
        self.report_config = ReportConfiguration(
            domain=self.domain,
            config_id=self.data_source._id,
            aggregation_columns=['doc_id'],
            columns=[{
                "type": "field",
                "field": "name",
                "column_id": "name",
                "display": "Name",
                "aggregation": "simple",
            }, {
                "type": "field",
                "field": "number",
                "column_id": "number",
                "display": "Number",
                "aggregation": "simple",
            }],
            filters=[],
            configured_charts=[])
        self.report_config.save()
        self.addCleanup(self.report_config.delete)

    def _add_some_rows(self, count):
        rows = [ReportDataTestRow(uuid.uuid4().hex, i) for i in range(count)]
        self._add_rows(rows)
        return rows

    def _add_rows(self, rows):
        pillow = get_kafka_ucr_pillow()
        pillow.bootstrap(configs=[self.data_source])

        def _get_case(row):
            return {
                '_id': uuid.uuid4().hex,
                'domain': self.domain,
                'doc_type': 'CommCareCase',
                'type': 'city',
                'name': row.name,
                'number': row.number,
            }

        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))

    def test_basic_query(self):
        # add a few rows to the data source
        rows = self._add_some_rows(3)

        # check the returned data from the report looks right
        report_data_source = ReportFactory.from_spec(self.report_config)
        report_data = report_data_source.get_data()
        self.assertEqual(len(rows), len(report_data))
        rows_by_name = {r.name: r for r in rows}
        for row in report_data:
            self.assertTrue(row['name'] in rows_by_name)
            self.assertEqual(rows_by_name[row['name']].number, row['number'])

    def test_limit(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        limited_data = report_data_source.get_data(limit=3)
        self.assertEqual(3, len(limited_data))
        self.assertEqual(original_data[:3], limited_data)

    def test_skip(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        skipped = report_data_source.get_data(start=3)
        self.assertEqual(count - 3, len(skipped))
        self.assertEqual(original_data[3:], skipped)
Example No. 50
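    # Builds a data source with fruit/num1/num2 indicators and a report with a field column
    # and a percent column, returning the report config and a ConfigurableReport view.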
    def _build_report_and_view(cls):

        # Create Cases
        cls._new_case({'fruit': 'apple', 'num1': 4, 'num2': 6}).save()

        # Create report
        data_source_config = DataSourceConfiguration(
            domain=cls.domain,
            display_name='foo',
            referenced_doc_type='CommCareCase',
            table_id="woop_woop",
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {
                    "type": "property_name",
                    "property_name": "type"
                },
                "property_value": cls.case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'fruit'
                    },
                    "column_id": 'indicator_col_id_fruit',
                    "display_name": 'indicator_display_name_fruit',
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'num1'
                    },
                    "column_id": 'indicator_col_id_num1',
                    "datatype": "integer"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'num2'
                    },
                    "column_id": 'indicator_col_id_num2',
                    "datatype": "integer"
                },
            ],
        )
        data_source_config.validate()
        data_source_config.save()
        tasks.rebuild_indicators(data_source_config._id)

        report_config = ReportConfiguration(
            domain=cls.domain,
            config_id=data_source_config._id,
            title='foo',
            aggregation_columns=['doc_id'],
            columns=[
                {
                    "type": "field",
                    "display": "report_column_display_fruit",
                    "field": 'indicator_col_id_fruit',
                    'column_id': 'report_column_col_id_fruit',
                    'aggregation': 'simple'
                },
                {
                    "type": "percent",
                    "display": "report_column_display_percent",
                    'column_id': 'report_column_col_id_percent',
                    'format': 'percent',
                    "denominator": {
                        "type": "field",
                        "aggregation": "sum",
                        "field": "indicator_col_id_num1",
                        "column_id": "report_column_col_id_percent_num1"
                    },
                    "numerator": {
                        "type": "field",
                        "aggregation": "sum",
                        "field": "indicator_col_id_num2",
                        "column_id": "report_column_col_id_percent_num2"
                    }
                },
            ],
        )
        report_config.save()

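        # instantiate the report view by hand and set its domain, language, and report id directly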
        view = ConfigurableReport(request=HttpRequest())
        view._domain = cls.domain
        view._lang = "en"
        view._report_config_id = report_config._id

        return report_config, view
Ejemplo n.º 51
0
 def setUp(self):
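     # load the sample data source spec (with a repeat) from the test fixture files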
     folder = os.path.join(os.path.dirname(__file__), 'data', 'configs')
     sample_file = os.path.join(folder, 'data_source_with_repeat.json')
     with open(sample_file) as f:
         self.config = DataSourceConfiguration.wrap(json.loads(f.read()))
Ejemplo n.º 52
0
 def tearDownClass(cls):
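     # delete all remaining DataSourceConfiguration docs before the parent tearDownClass runs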
     for config in DataSourceConfiguration.all():
         config.delete()
     super(DataSourceConfigurationDbTest, cls).tearDownClass()
Ejemplo n.º 53
0
 def get_data_sources(self):
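     # fetch all active data sources (full documents) via the 'userreports/active_data_sources' CouchDB view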
     return DataSourceConfiguration.view('userreports/active_data_sources',
                                         reduce=False,
                                         include_docs=True).all()
Ejemplo n.º 54
0
 def _setup_config(self, doc_type, filter_):
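     # helper: build an (unsaved) DataSourceConfiguration with the given doc type and filter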
     return DataSourceConfiguration(domain='test',
                                    referenced_doc_type=doc_type,
                                    table_id='blah',
                                    configured_filter=filter_)
Ejemplo n.º 55
0
    def _build_report_and_view(self, request=HttpRequest()):
        # Create report
        data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name='foo',
            referenced_doc_type='CommCareCase',
            table_id="woop_woop",
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {
                    "type": "property_name",
                    "property_name": "type"
                },
                "property_value": self.case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'fruit'
                    },
                    "column_id": 'indicator_col_id_fruit',
                    "display_name": 'indicator_display_name_fruit',
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'num1'
                    },
                    "column_id": 'indicator_col_id_num1',
                    "datatype": "integer"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'num2'
                    },
                    "column_id": 'indicator_col_id_num2',
                    "datatype": "integer"
                },
            ],
        )
        data_source_config.validate()
        data_source_config.save()
        self.addCleanup(data_source_config.delete)
        tasks.rebuild_indicators(data_source_config._id)

        report_config = ReportConfiguration(
            domain=self.domain,
            config_id=data_source_config._id,
            title='foo',
            aggregation_columns=['doc_id'],
            columns=[{
                "type": "field",
                "display": "report_column_display_fruit",
                "field": 'indicator_col_id_fruit',
                'column_id': 'report_column_col_id_fruit',
                'aggregation': 'simple'
            }, {
                "type": "percent",
                "display": "report_column_display_percent",
                'column_id': 'report_column_col_id_percent',
                'format': 'percent',
                "denominator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num1",
                    "column_id": "report_column_col_id_percent_num1"
                },
                "numerator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num2",
                    "column_id": "report_column_col_id_percent_num2"
                }
            }, {
                "type": "expanded",
                "display": "report_column_display_expanded_num1",
                "field": 'indicator_col_id_num1',
                'column_id': 'report_column_col_id_expanded_num1',
            }],
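            # charts: a pie of counts by fruit, and a multibar of the expanded num1 column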
            configured_charts=[
                {
                    "type": 'pie',
                    "value_column": 'count',
                    "aggregation_column": 'fruit',
                    "title": 'Fruits'
                },
                {
                    "type":
                    'multibar',
                    "title":
                    'Fruit Properties',
                    "x_axis_column":
                    'fruit',
                    "y_axis_columns": [{
                        "column_id": "report_column_col_id_expanded_num1",
                        "display": "Num1 values"
                    }]
                },
            ])
        report_config.save()
        self.addCleanup(report_config.delete)

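        # construct the view under test and assign its domain, language, and report config id directly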
        view = ConfigurableReportView(request=request)
        view._domain = self.domain
        view._lang = "en"
        view._report_config_id = report_config._id

        return report_config, view
Ejemplo n.º 56
0
 def _delete_everything(cls):
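     # wipe all cases, data source configs, and report configs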
     delete_all_cases()
     for config in DataSourceConfiguration.all():
         config.delete()
     delete_all_report_configs()
Ejemplo n.º 57
0
 def test_get_all(self):
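     # all() returns every saved config; the three expected here are presumably created in setup code not shown in this snippet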
     self.assertEqual(3, len(list(DataSourceConfiguration.all())))
Ejemplo n.º 58
0
 def test_is_static_negative(self):
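     # dynamically constructed configs are not static, with or without an explicit _id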
     self.assertFalse(DataSourceConfiguration().is_static)
     self.assertFalse(
         DataSourceConfiguration(_id=uuid.uuid4().hex).is_static)
Ejemplo n.º 59
0
    def test_report_builder_datasource_deactivation(self):
        def _get_data_source(id_):
            return get_datasource_config(id_, self.project.name)[0]

        # Upgrade the domain
        # (for the upgrade to work, there has to be an existing subscription,
        # which is why we subscribe to advanced first)
        self._subscribe_to_advanced()
        pro_with_rb_sub = self._subscribe_to_pro_with_rb()

        # Create reports and data sources
        builder_report_data_source = DataSourceConfiguration(
            domain=self.project.name,
            is_deactivated=False,
            referenced_doc_type="XFormInstance",
            table_id="foo",
        )
        other_data_source = DataSourceConfiguration(
            domain=self.project.name,
            is_deactivated=False,
            referenced_doc_type="XFormInstance",
            table_id="bar",
        )
        builder_report_data_source.save()
        other_data_source.save()
        report_builder_report = ReportConfiguration(
            domain=self.project.name,
            config_id=builder_report_data_source._id,
            report_meta=ReportMeta(created_by_builder=True),
        )
        report_builder_report.save()

        # downgrade the domain
        community_sub = pro_with_rb_sub.change_plan(
            DefaultProductPlan.get_default_plan_version())

        # Check that the builder data source is deactivated
        builder_report_data_source = _get_data_source(
            builder_report_data_source._id)
        self.assertTrue(builder_report_data_source.is_deactivated)
        # Check that the other data source has not been deactivated
        other_data_source = _get_data_source(other_data_source._id)
        self.assertFalse(other_data_source.is_deactivated)

        # upgrade the domain
        # (for the upgrade to work, there has to be an existing subscription,
        # which is why we subscribe to advanced first)
        community_sub.change_plan(
            DefaultProductPlan.get_default_plan_version(
                edition=SoftwarePlanEdition.ADVANCED))
        pro_with_rb_sub = self._subscribe_to_pro_with_rb()

        # check that the data source is activated
        builder_report_data_source = _get_data_source(
            builder_report_data_source._id)
        self.assertFalse(builder_report_data_source.is_deactivated)

        # delete the data sources
        builder_report_data_source.delete()
        other_data_source.delete()
        # Delete the report
        report_builder_report.delete()

        # reset the subscription
        pro_with_rb_sub.change_plan(
            DefaultProductPlan.get_default_plan_version())
Ejemplo n.º 60
0
 def setUp(self):
     self.indicator_configuration = DataSourceConfiguration.wrap({
         'display_name': 'Mother Indicators',
         'doc_type': 'DataSourceConfiguration',
         'domain': 'test',
         'referenced_doc_type': 'CommCareCase',
         'table_id': 'mother_indicators',
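         # reusable named expressions; 'on_a_date' is referenced below by the 'has_alibi' named filter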
         'named_expressions': {
             'on_a_date': {
                 'type': 'property_name',
                 'property_name': 'on_date',
             }
         },
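         # named filters; referenced from the configured_filter and from indicators via {'type': 'named', 'name': ...}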
         'named_filters': {
             'pregnant': {
                 'type': 'property_match',
                 'property_name': 'mother_state',
                 'property_value': 'pregnant',
             },
             'evil': {
                 'type': 'property_match',
                 'property_name': 'evil',
                 'property_value': 'yes',
             },
             'has_alibi': {
                 'type': 'boolean_expression',
                 'expression': {
                     'type': 'named',
                     'name': 'on_a_date'
                 },
                 'operator': 'eq',
                 'property_value': 'yes',
             }
         },
         'configured_filter': {
             'type': 'and',
             'filters': [{
                 'property_name': 'type',
                 'property_value': 'ttc_mother',
                 'type': 'property_match',
             }, {
                 'type': 'named',
                 'name': 'pregnant',
             }]
         },
         'configured_indicators': [{
             "type": "boolean",
             "column_id": "is_evil",
             "filter": {
                 "type": "named",
                 "name": "evil"
             }
         }, {
             "type": "expression",
             "column_id": "laugh_sound",
             "datatype": "string",
             "expression": {
                 'type': 'conditional',
                 'test': {
                     "type": "named",
                     "name": "evil"
                 },
                 'expression_if_true': {
                     'type': 'constant',
                     'constant': 'mwa-ha-ha',
                 },
                 'expression_if_false': {
                     'type': 'constant',
                     'constant': 'hehe',
                 },
             }
         }]
     })