def test_import_from_basic_definition(self):
    """Importing a monthly config spec creates the full aggregate model tree."""
    spec = self.get_monthly_config_spec()
    # Any saved data source works here; the spec just needs valid references.
    ds = DataSourceConfiguration(
        domain=spec.domain,
        referenced_doc_type='CommCareCase',
        table_id='some_table',
    )
    ds.save()
    self.addCleanup(ds.delete)
    spec.primary_table.data_source_id = ds._id
    spec.secondary_tables[0].data_source_id = ds._id

    imported = import_aggregation_models_from_spec(spec)

    self.assertEqual(1, AggregateTableDefinition.objects.count())
    table_def = AggregateTableDefinition.objects.get(pk=imported.pk)
    self.assertEqual(ds._id, table_def.primary_data_source_id.hex)
    self.assertEqual(4, table_def.primary_columns.count())

    aggregation = table_def.time_aggregation
    self.assertEqual('month', aggregation.aggregation_unit)
    self.assertEqual('opened_date', aggregation.start_column)
    self.assertEqual('closed_date', aggregation.end_column)

    self.assertEqual(1, table_def.secondary_tables.count())
    secondary = table_def.secondary_tables.get()
    self.assertEqual(ds._id, secondary.data_source_id.hex)
    self.assertEqual('doc_id', secondary.join_column_primary)
    self.assertEqual('form.case.@case_id', secondary.join_column_secondary)
    self.assertEqual('received_on', secondary.time_window_column)
    self.assertEqual(2, secondary.columns.count())
    self.assertEqual(
        {'fu_forms_in_month', 'any_fu_forms_in_month'},
        set(secondary.columns.values_list('column_id', flat=True)),
    )
def setUpClass(cls):
    # Create a data source and a report with one plain-string column display
    # and one translated (en/fra) column display.
    #
    # NOTE(review): this override never calls super().setUpClass(); if the
    # base class is a (Django) TestCase whose setUpClass does class-level
    # setup, that setup is silently skipped -- confirm this is intentional
    # (the sibling ReportTranslationTest.setUpClass does call super()).
    data_source = DataSourceConfiguration(
        domain=cls.DOMAIN,
        table_id="foo",
        referenced_doc_type="CommCareCase",
    )
    data_source.save()
    ReportConfiguration(
        domain=cls.DOMAIN,
        config_id=data_source._id,
        columns=[
            {
                "type": "field",
                "field": "foo",
                "column_id": "foo",
                "aggregation": "simple",
                "display": "My Column",
            },
            {
                "type": "field",
                "field": "bar",
                "column_id": "bar",
                "aggregation": "simple",
                "display": {"en": "Name", "fra": "Nom"},
            },
        ]
    ).save()
def setUpClass(cls):
    """Create a data source plus a report whose second column is translated."""
    super(ReportTranslationTest, cls).setUpClass()
    source = DataSourceConfiguration(
        domain=cls.DOMAIN,
        table_id="foo",
        referenced_doc_type="CommCareCase",
    )
    source.save()
    # One plain-string display and one en/fra translated display.
    report_columns = [
        {
            "type": "field",
            "field": "foo",
            "column_id": "foo",
            "aggregation": "simple",
            "display": "My Column",
        },
        {
            "type": "field",
            "field": "bar",
            "column_id": "bar",
            "aggregation": "simple",
            "display": {"en": "Name", "fra": "Nom"},
        },
    ]
    ReportConfiguration(
        domain=cls.DOMAIN,
        config_id=source._id,
        columns=report_columns,
    ).save()
def _create_data_source(cls):
    """Build the date/datetime case data source once per UCR backend.

    Done in a class method (a hack) so both the sql and es backends exist
    once for the whole class instead of being rebuilt for every test.
    """
    cls.data_sources = {}
    cls.adapters = {}
    case_type_filter = {
        "type": "boolean_expression",
        "operator": "eq",
        "expression": {
            "type": "property_name",
            "property_name": "type"
        },
        "property_value": cls.case_type,
    }
    date_indicators = [
        {
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": 'my_date'
            },
            "column_id": 'date_as_string',
            "display_name": 'date_as_string',
            "datatype": "string"
        },
        {
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": 'my_date'
            },
            "column_id": 'date_as_date',
            "datatype": "date"
        },
        {
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": "my_datetime",
            },
            "column_id": "datetime_as_datetime",
            "datatype": "datetime"
        },
    ]
    for backend in UCR_BACKENDS:
        config = DataSourceConfiguration(
            backend_id=backend,
            domain=cls.domain,
            display_name=cls.domain,
            referenced_doc_type='CommCareCase',
            table_id="foo",
            configured_filter=case_type_filter,
            configured_indicators=date_indicators,
        )
        config.validate()
        config.save()
        rebuild_indicators(config._id)
        adapter = get_indicator_adapter(config)
        adapter.refresh_table()
        cls.data_sources[backend] = config
        cls.adapters[backend] = adapter
def test_import_from_basic_definition(self):
    """Importing a monthly spec builds the aggregate table definition tree."""
    spec = self.get_monthly_config_spec()
    # Any valid, saved data source satisfies the spec's references.
    ds = DataSourceConfiguration(
        domain=spec.domain,
        referenced_doc_type='CommCareCase',
        table_id='some_table',
    )
    ds.save()
    self.addCleanup(ds.delete)
    spec.primary_table.data_source_id = ds._id
    spec.secondary_tables[0].data_source_id = ds._id

    imported = import_aggregation_models_from_spec(spec)

    self.assertEqual(1, AggregateTableDefinition.objects.count())
    table_def = AggregateTableDefinition.objects.get(pk=imported.pk)
    self.assertEqual(ds._id, table_def.primary_data_source_id.hex)
    self.assertEqual(4, table_def.primary_columns.count())

    aggregation = table_def.time_aggregation
    self.assertEqual('month', aggregation.aggregation_unit)
    self.assertEqual('opened_date', aggregation.start_column)
    self.assertEqual('closed_date', aggregation.end_column)

    self.assertEqual(1, table_def.secondary_tables.count())
    secondary = table_def.secondary_tables.get()
    self.assertEqual(ds._id, secondary.data_source_id.hex)
    self.assertEqual('doc_id', secondary.join_column_primary)
    self.assertEqual('form.case.@case_id', secondary.join_column_secondary)
    self.assertEqual('received_on', secondary.time_window_column)
    self.assertEqual(1, secondary.columns.count())
    only_column = secondary.columns.get()
    self.assertEqual('fu_forms_in_month', only_column.column_id)
def _create_data_source(cls):
    """Create the state/city/number case data source under every UCR backend."""
    cls.data_sources = {}
    cls.adapters = {}

    def _property(name):
        # expression spec that reads one case property
        return {"type": "property_name", "property_name": name}

    for backend in UCR_BACKENDS:
        config = DataSourceConfiguration(
            backend_id=backend,
            domain=cls.domain,
            display_name=cls.domain,
            referenced_doc_type='CommCareCase',
            table_id="foo",
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": _property("type"),
                "property_value": cls.case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": _property('state'),
                    "column_id": 'indicator_col_id_state',
                    "display_name": 'indicator_display_name_state',
                    "datatype": "string",
                },
                {
                    "type": "expression",
                    "expression": _property('city'),
                    "column_id": 'indicator_col_id_city',
                    "display_name": 'indicator_display_name_city',
                    "datatype": "string",
                },
                {
                    "type": "expression",
                    "expression": _property('number'),
                    "column_id": 'indicator_col_id_number',
                    "datatype": "integer",
                },
            ],
        )
        config.validate()
        config.save()
        rebuild_indicators(config._id)
        adapter = get_indicator_adapter(config)
        adapter.refresh_table()
        cls.data_sources[backend] = config
        cls.adapters[backend] = adapter
def _build_report(self, vals, field="my_field", build_data_source=True):
    """
    Build a new report, and populate it with cases.

    Return a ConfigurableReportDataSource and a FieldColumn
    :param vals: List of values to populate the given report field with.
    :param field: The name of a field in the data source/report
    :param build_data_source: When True, also rebuild the indicator table.
    :return: Tuple containing a ConfigurableReportDataSource and FieldColumn.
    The column is a column mapped to the given field.
    """
    # Create Cases
    for v in vals:
        self._new_case({field: v}).save()

    # Create report
    data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name="foo",
        referenced_doc_type="CommCareCase",
        table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter={
            "type": "boolean_expression",
            "operator": "eq",
            "expression": {"type": "property_name", "property_name": "type"},
            "property_value": self.case_type,
        },
        configured_indicators=[
            {
                "type": "expression",
                "expression": {"type": "property_name", "property_name": field},
                "column_id": field,
                "display_name": field,
                "datatype": "string",
            }
        ],
    )
    data_source_config.validate()
    data_source_config.save()
    # Register cleanup so the saved config doesn't leak into other tests
    # (matches the sibling _build_report helper's behavior).
    self.addCleanup(data_source_config.delete)
    if build_data_source:
        tasks.rebuild_indicators(data_source_config._id)

    report_config = ReportConfiguration(
        domain=self.domain,
        config_id=data_source_config._id,
        title="foo",
        aggregation_columns=["doc_id"],
        columns=[{"type": "expanded", "field": field, "display": field, "format": "default"}],
        filters=[],
        configured_charts=[],
    )
    report_config.save()
    self.addCleanup(report_config.delete)
    data_source = ReportFactory.from_spec(report_config)
    return data_source, data_source.column_configs[0]
def _create_data_source(cls):
    """Create the date/datetime case data source (sql backend only)."""
    cls.data_sources = {}
    cls.adapters = {}

    def _case_property(name):
        # expression spec reading one case property
        return {"type": "property_name", "property_name": name}

    config = DataSourceConfiguration(
        domain=cls.domain,
        display_name=cls.domain,
        referenced_doc_type='CommCareCase',
        table_id="foo",
        configured_filter={
            "type": "boolean_expression",
            "operator": "eq",
            "expression": _case_property("type"),
            "property_value": cls.case_type,
        },
        configured_indicators=[
            {
                "type": "expression",
                "expression": _case_property('my_date'),
                "column_id": 'date_as_string',
                "display_name": 'date_as_string',
                "datatype": "string",
            },
            {
                "type": "expression",
                "expression": _case_property('my_date'),
                "column_id": 'date_as_date',
                "datatype": "date",
            },
            {
                "type": "expression",
                "expression": _case_property("my_datetime"),
                "column_id": "datetime_as_datetime",
                "datatype": "datetime",
            },
        ],
    )
    config.validate()
    config.save()
    rebuild_indicators(config._id)
    adapter = get_indicator_adapter(config)
    cls.data_sources[UCR_SQL_BACKEND] = config
    cls.adapters[UCR_SQL_BACKEND] = adapter
def _create_data_source(cls):
    """Build the first_name/number case data source once per UCR backend.

    Class-method hack so both the sql and es backends are created a single
    time for the whole class rather than on each test run.
    """
    cls.data_sources = {}
    cls.adapters = {}
    for backend in UCR_BACKENDS:
        config = DataSourceConfiguration(
            backend_id=backend,
            domain=cls.domain,
            display_name=cls.domain,
            referenced_doc_type='CommCareCase',
            table_id="foo",
            configured_filter={
                "type": "boolean_expression",
                "operator": "eq",
                "expression": {
                    "type": "property_name",
                    "property_name": "type",
                },
                "property_value": cls.case_type,
            },
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'first_name',
                    },
                    "column_id": 'indicator_col_id_first_name',
                    "display_name": 'indicator_display_name_first_name',
                    "datatype": "string",
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'number',
                    },
                    "column_id": 'indicator_col_id_number',
                    "datatype": "integer",
                },
            ],
        )
        config.validate()
        config.save()
        rebuild_indicators(config._id)
        adapter = get_indicator_adapter(config)
        adapter.refresh_table()
        cls.data_sources[backend] = config
        cls.adapters[backend] = adapter
def test_last_modified_date(self):
    """last_modified should be bumped on every save of the config."""
    before_create = datetime.datetime.utcnow()
    time.sleep(.01)
    config = DataSourceConfiguration(
        domain='mod-test',
        table_id='mod-test',
        referenced_doc_type='mod-test'
    )
    config.save()
    self.assertTrue(before_create < config.last_modified)

    time.sleep(.01)
    checkpoint = datetime.datetime.utcnow()
    self.assertTrue(checkpoint > config.last_modified)

    time.sleep(.01)
    # A second save must push last_modified past the checkpoint.
    config.save()
    time.sleep(.01)
    self.assertTrue(checkpoint < config.last_modified)
    self.assertTrue(datetime.datetime.utcnow() > config.last_modified)
def test_last_modified_date(self):
    """Saving a config must advance its last_modified timestamp."""
    start = datetime.datetime.utcnow()
    time.sleep(.01)
    data_source = DataSourceConfiguration(
        domain='mod-test',
        table_id='mod-test',
        referenced_doc_type='mod-test',
    )
    data_source.save()
    self.assertTrue(start < data_source.last_modified)
    time.sleep(.01)
    midpoint = datetime.datetime.utcnow()
    self.assertTrue(data_source.last_modified < midpoint)
    time.sleep(.01)
    # re-saving should move last_modified past the midpoint
    data_source.save()
    time.sleep(.01)
    self.assertTrue(midpoint < data_source.last_modified)
    self.assertTrue(data_source.last_modified < datetime.datetime.utcnow())
def _create_report(domain, title="report", upstream_id=None, should_save=True):
    """Create a builder report (and a backing data source) in ``domain``.

    :param title: report title
    :param upstream_id: master report id recorded on the report meta
    :param should_save: persist the ReportConfiguration before returning
    :return: the (possibly unsaved) ReportConfiguration
    """
    data_source = DataSourceConfiguration(
        domain=domain,
        table_id=uuid.uuid4().hex,
        referenced_doc_type='XFormInstance',
    )
    data_source.save()
    meta = ReportMeta(created_by_builder=True, master_id=upstream_id)
    report = ReportConfiguration(
        domain=domain,
        config_id=data_source._id,
        title=title,
        report_meta=meta,
    )
    if should_save:
        report.save()
    return report
def _build_data_source(self):
    """Create, save and (asynchronously) rebuild the report's data source.

    :return: the new DataSourceConfiguration's id
    """
    build_info = DataSourceBuildInformation(
        source_id=self.report_source_id,
        app_id=self.app._id,
        app_version=self.app.version,
    )
    config = DataSourceConfiguration(
        domain=self.domain,
        display_name=self.ds_builder.data_source_name,
        referenced_doc_type=self.ds_builder.source_doc_type,
        # The uuid gets truncated, so it's not really universally unique.
        table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter=self.ds_builder.filter,
        configured_indicators=self.ds_builder.indicators,
        meta=DataSourceMeta(build=build_info),
    )
    config.validate()
    config.save()
    # The indicator table rebuild is queued, not performed inline.
    tasks.rebuild_indicators.delay(config._id)
    return config._id
def create_report(self):
    """
    Creates data source and report config.

    Reuses an existing matching data source when one is available;
    otherwise builds and saves a new one and queues an async rebuild.
    """
    existing = self.ds_builder.get_existing_match()
    if existing:
        data_source_config_id = existing['id']
    else:
        data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name=self.ds_builder.data_source_name,
            referenced_doc_type=self.ds_builder.source_doc_type,
            # The uuid gets truncated, so it's not really universally unique.
            table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter=self.ds_builder.filter,
            configured_indicators=self.ds_builder.indicators,
            meta=DataSourceMeta(build=DataSourceBuildInformation(
                source_id=self.report_source_id,
                app_id=self.app._id,
                app_version=self.app.version,
            )),
        )
        data_source_config.validate()
        data_source_config.save()
        tasks.rebuild_indicators.delay(data_source_config._id)
        data_source_config_id = data_source_config._id

    report = ReportConfiguration(
        domain=self.domain,
        config_id=data_source_config_id,
        title=self.report_name,
        aggregation_columns=self._report_aggregation_cols,
        columns=self._report_columns,
        filters=self._report_filters,
        configured_charts=self._report_charts,
        report_meta=ReportMeta(
            created_by_builder=True,
            builder_report_type=self.report_type,
        ),
    )
    report.validate()
    report.save()
    return report
def _build_data_source(self):
    """Validate, persist, and asynchronously rebuild a new data source.

    :return: the saved DataSourceConfiguration's id
    """
    data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name=self.ds_builder.data_source_name,
        referenced_doc_type=self.ds_builder.source_doc_type,
        # The uuid gets truncated, so it's not really universally unique.
        table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter=self.ds_builder.filter,
        configured_indicators=self.ds_builder.indicators,
        meta=DataSourceMeta(
            build=DataSourceBuildInformation(
                source_id=self.report_source_id,
                app_id=self.app._id,
                app_version=self.app.version,
            ),
        ),
    )
    data_source_config.validate()
    data_source_config.save()
    # Queue the table rebuild rather than doing it inline.
    tasks.rebuild_indicators.delay(data_source_config._id)
    return data_source_config._id
def _build_report(cls):
    """Create one case plus a data source and a report over it.

    The report has a simple fruit column and a percent column
    (sum(num2) / sum(num1)).  Returns the saved ReportConfiguration.
    """
    # Create Cases
    cls._new_case({'fruit': 'apple', 'num1': 4, 'num2': 6}).save()
    # Create report
    data_source_config = DataSourceConfiguration(
        domain=cls.domain,
        display_name='foo',
        referenced_doc_type='CommCareCase',
        table_id="woop_woop",
        configured_filter={
            "type": "boolean_expression",
            "operator": "eq",
            "expression": {
                "type": "property_name",
                "property_name": "type"
            },
            "property_value": cls.case_type,
        },
        configured_indicators=[
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'fruit'
                },
                "column_id": 'indicator_col_id_fruit',
                "display_name": 'indicator_display_name_fruit',
                "datatype": "string"
            },
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'num1'
                },
                "column_id": 'indicator_col_id_num1',
                "datatype": "integer"
            },
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'num2'
                },
                "column_id": 'indicator_col_id_num2',
                "datatype": "integer"
            },
        ],
    )
    data_source_config.validate()
    data_source_config.save()
    # synchronous rebuild so the table is queryable immediately
    tasks.rebuild_indicators(data_source_config._id)
    report_config = ReportConfiguration(
        domain=cls.domain,
        config_id=data_source_config._id,
        title='foo',
        aggregation_columns=['doc_id'],
        columns=[
            {
                "type": "field",
                "display": "report_column_display_fruit",
                "field": 'indicator_col_id_fruit',
                'column_id': 'report_column_col_id_fruit',
                'aggregation': 'simple'
            },
            {
                "type": "percent",
                "display": "report_column_display_percent",
                'column_id': 'report_column_col_id_percent',
                'format': 'percent',
                "denominator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num1",
                    "column_id": "report_column_col_id_percent_num1"
                },
                "numerator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num2",
                    "column_id": "report_column_col_id_percent_num2"
                }
            },
        ],
    )
    report_config.save()
    return report_config
class ReportDataTest(TestCase):
    """End-to-end test of a UCR report over a case data source.

    Rows are fed through the kafka UCR pillow and queried back via
    ReportFactory, running under every UCR backend.
    """

    def setUp(self):
        super(ReportDataTest, self).setUp()
        # Create report
        self.domain = "test-ucr-report-data"
        self.data_source = DataSourceConfiguration(
            domain=self.domain,
            referenced_doc_type="CommCareCase",
            table_id=uuid.uuid4().hex,
            configured_filter={},
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {"type": "property_name", "property_name": "name"},
                    "column_id": "name",
                    "display_name": "name",
                    "datatype": "string",
                },
                {
                    "type": "expression",
                    "expression": {"type": "property_name", "property_name": "number"},
                    "column_id": "number",
                    "display_name": "number",
                    "datatype": "integer",
                },
            ],
        )
        self.data_source.validate()
        self.data_source.save()
        self.adapter = get_indicator_adapter(self.data_source)
        self.adapter.rebuild_table()
        self.addCleanup(self.data_source.delete)

        # initialize a report on the data
        self.report_config = ReportConfiguration(
            domain=self.domain,
            config_id=self.data_source._id,
            aggregation_columns=["doc_id"],
            columns=[
                {"type": "field", "field": "name", "column_id": "name", "display": "Name", "aggregation": "simple"},
                {
                    "type": "field",
                    "field": "number",
                    "column_id": "number",
                    "display": "Number",
                    "aggregation": "simple",
                },
                # constant column: always 10
                {
                    "type": "expression",
                    "column_id": "ten",
                    "display": "The Number Ten",
                    "expression": {"type": "constant", "constant": 10},
                },
                # computed column: number * ten
                {
                    "type": "expression",
                    "column_id": "by_tens",
                    "display": "Counting by tens",
                    "expression": {
                        "type": "evaluator",
                        "statement": "a * b",
                        "context_variables": {
                            "a": {"type": "property_name", "property_name": "number"},
                            "b": {"type": "property_name", "property_name": "ten"},
                        },
                    },
                },
            ],
            filters=[],
            configured_charts=[],
        )
        self.report_config.save()
        self.addCleanup(self.report_config.delete)

    def _add_some_rows(self, count):
        # Insert ``count`` generated rows and refresh so they are queryable.
        rows = [ReportDataTestRow(uuid.uuid4().hex, i) for i in range(count)]
        self._add_rows(rows)
        self.adapter.refresh_table()
        return rows

    def _add_rows(self, rows):
        # Feed the rows through the kafka UCR pillow as case changes.
        pillow = get_kafka_ucr_pillow()
        pillow.bootstrap(configs=[self.data_source])

        def _get_case(row):
            # minimal case doc matching the data source's indicators
            return {
                "_id": uuid.uuid4().hex,
                "domain": self.domain,
                "doc_type": "CommCareCase",
                "type": "city",
                "name": row.name,
                "number": row.number,
            }

        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))

    @run_with_all_ucr_backends
    def test_basic_query(self):
        # add a few rows to the data source
        rows = self._add_some_rows(3)
        # check the returned data from the report looks right
        report_data_source = ReportFactory.from_spec(self.report_config)
        report_data = report_data_source.get_data()
        self.assertEqual(len(rows), len(report_data))
        rows_by_name = {r.name: r for r in rows}
        for row in report_data:
            self.assertTrue(row["name"] in rows_by_name)
            self.assertEqual(rows_by_name[row["name"]].number, row["number"])
            self.assertEqual(10, row["ten"])
            self.assertEqual(10 * row["number"], row["by_tens"])

    @run_with_all_ucr_backends
    def test_limit(self):
        # limit=3 should return the first three of the full result set
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        limited_data = report_data_source.get_data(limit=3)
        self.assertEqual(3, len(limited_data))
        self.assertEqual(original_data[:3], limited_data)

    @run_with_all_ucr_backends
    def test_skip(self):
        # start=3 should skip the first three rows
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        skipped = report_data_source.get_data(start=3)
        self.assertEqual(count - 3, len(skipped))
        self.assertEqual(original_data[3:], skipped)
class ReportDataTest(TestCase):
    """UCR report tests covering totals, sorting, and translation transforms."""

    def setUp(self):
        super(ReportDataTest, self).setUp()
        # Create report
        self.domain = 'test-ucr-report-data'
        self.data_source = DataSourceConfiguration(
            domain=self.domain,
            referenced_doc_type='CommCareCase',
            table_id=uuid.uuid4().hex,
            configured_filter={},
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'name'
                },
                "column_id": 'name',
                "display_name": 'name',
                "datatype": "string"
            }, {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'number'
                },
                "column_id": 'number',
                "display_name": 'number',
                "datatype": "integer"
            }, {
                # the same property again, but stored as a string so the
                # translation transform can be exercised on it
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'number'
                },
                "column_id": 'string-number',
                "display_name": 'string-number',
                "datatype": "string"
            }, {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'just_for_sorting'
                },
                "column_id": 'just_for_sorting',
                "display_name": 'just_for_sorting',
                "datatype": "string"
            }],
        )
        self.data_source.validate()
        self.data_source.save()
        self.adapter = get_indicator_adapter(self.data_source)
        self.adapter.rebuild_table()
        self.addCleanup(self.data_source.delete)

        # initialize a report on the data
        self.report_config = ReportConfiguration(
            domain=self.domain,
            config_id=self.data_source._id,
            aggregation_columns=['doc_id'],
            columns=[{
                "type": "field",
                "field": "name",
                "column_id": "name",
                "display": "Name",
                "aggregation": "simple",
            }, {
                # only this column contributes to the report's total row
                "type": "field",
                "field": "number",
                "column_id": "number",
                "display": "Number",
                "aggregation": "simple",
                "calculate_total": True,
            }, {
                "type": "expression",
                "column_id": "ten",
                "display": "The Number Ten",
                "expression": {
                    'type': 'constant',
                    'constant': 10,
                }
            }, {
                # computed column: number * ten
                "type": "expression",
                "column_id": "by_tens",
                "display": "Counting by tens",
                "expression": {
                    "type": "evaluator",
                    "statement": "a * b",
                    "context_variables": {
                        "a": {
                            "type": "property_name",
                            "property_name": "number",
                        },
                        "b": {
                            "type": "property_name",
                            "property_name": "ten",
                        }
                    }
                }
            }, {
                # translation transform: "0"/"1"/"2" become words, other
                # values pass through untranslated
                "type": "field",
                "field": 'string-number',
                "display": 'Display Number',
                "aggregation": "simple",
                "transform": {
                    "type": "translation",
                    "translations": {
                        "0": "zero",
                        "1": {
                            "en": "one",
                            "es": "uno"
                        },
                        "2": {
                            "en": "two",
                            "es": "dos"
                        }
                    },
                },
            }],
            filters=[],
            configured_charts=[],
            sort_expression=[{
                'field': 'just_for_sorting',
                'order': 'DESC'
            }])
        self.report_config.save()
        self.addCleanup(self.report_config.delete)

    def _add_some_rows(self, count):
        # Insert ``count`` generated rows (sort key mirrors the number).
        rows = [
            ReportDataTestRow(uuid.uuid4().hex, i, i)
            for i in range(count)
        ]
        self._add_rows(rows)
        return rows

    def _add_rows(self, rows):
        # Feed the rows through the kafka UCR pillow as case changes.
        pillow = get_kafka_ucr_pillow()
        pillow.bootstrap(configs=[self.data_source])

        def _get_case(row):
            # minimal case doc matching the data source's indicators
            return {
                '_id': uuid.uuid4().hex,
                'domain': self.domain,
                'doc_type': 'CommCareCase',
                'type': 'city',
                'name': row.name,
                'number': row.number,
                'just_for_sorting': row.sort_key,
            }

        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))

    def test_basic_query(self):
        # add a few rows to the data source
        rows = self._add_some_rows(3)
        # check the returned data from the report looks right
        report_data_source = ConfigurableReportDataSource.from_spec(
            self.report_config)
        report_data = report_data_source.get_data()
        self.assertEqual(len(rows), len(report_data))
        rows_by_name = {r.name: r for r in rows}
        for row in report_data:
            self.assertTrue(row['name'] in rows_by_name)
            self.assertEqual(rows_by_name[row['name']].number, row['number'])
            self.assertEqual(10, row['ten'])
            self.assertEqual(10 * row['number'], row['by_tens'])

    def test_limit(self):
        # limit=3 should return the first three of the full result set
        count = 5
        self._add_some_rows(count)
        report_data_source = ConfigurableReportDataSource.from_spec(
            self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        limited_data = report_data_source.get_data(limit=3)
        self.assertEqual(3, len(limited_data))
        self.assertEqual(original_data[:3], limited_data)

    def test_skip(self):
        # start=3 should skip the first three rows
        count = 5
        self._add_some_rows(count)
        report_data_source = ConfigurableReportDataSource.from_spec(
            self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        skipped = report_data_source.get_data(start=3)
        self.assertEqual(count - 3, len(skipped))
        self.assertEqual(original_data[3:], skipped)

    def test_total_row(self):
        # only the "number" column has calculate_total; others are blank
        rows = self._add_some_rows(3)
        report_data_source = ConfigurableReportDataSource.from_spec(
            self.report_config)
        total_number = sum(row.number for row in rows)
        self.assertEqual(report_data_source.get_total_row(),
                         ['Total', total_number, '', '', ''])

    def test_transform(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ConfigurableReportDataSource.from_spec(
            self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        rows_by_number = {int(row['number']): row for row in original_data}
        # Make sure the translations happened
        self.assertEqual(rows_by_number[0]['string-number'], "zero")
        self.assertEqual(rows_by_number[1]['string-number'], "one")
        self.assertEqual(rows_by_number[2]['string-number'], "two")
        # These last two are untranslated
        self.assertEqual(rows_by_number[3]['string-number'], "3")
        self.assertEqual(rows_by_number[4]['string-number'], "4")
class TestLocationDataSource(TestCase):
    """UCR data source over Location docs: create, rename, and delete
    locations and verify the pillow keeps the indicator table in sync."""

    domain = "delos_corp"

    def setUp(self):
        self.domain_obj = create_domain(self.domain)
        self.region = LocationType.objects.create(domain=self.domain, name="region")
        self.town = LocationType.objects.create(domain=self.domain, name="town", parent_type=self.region)
        # data source with a single column: the location's name
        self.data_source_config = DataSourceConfiguration(
            domain=self.domain,
            display_name='Locations in Westworld',
            referenced_doc_type='Location',
            table_id=clean_table_name(self.domain, str(uuid.uuid4().hex)),
            configured_filter={},
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": "name"
                },
                "column_id": "location_name",
                "display_name": "location_name",
                "datatype": "string"
            }],
        )
        self.data_source_config.validate()
        self.data_source_config.save()
        self.pillow = get_kafka_ucr_pillow()
        self.pillow.bootstrap(configs=[self.data_source_config])
        # skip the test (rather than erroring) when kafka isn't running
        with trap_extra_setup(KafkaUnavailableError):
            self.pillow.get_change_feed().get_latest_offsets()

    def tearDown(self):
        self.domain_obj.delete()
        self.data_source_config.delete()

    def _make_loc(self, name, location_type):
        # site_code reuses the name for convenience
        return SQLLocation.objects.create(
            domain=self.domain, name=name, site_code=name,
            location_type=location_type)

    def assertDataSourceAccurate(self, expected_locations):
        # compare the expected names against the last column of each
        # indicator-table row (order-insensitive)
        adapter = get_indicator_adapter(self.data_source_config)
        query = adapter.get_query_object()
        data_source = query.all()
        self.assertItemsEqual(
            expected_locations,
            [row[-1] for row in data_source]
        )

    def test_location_data_source(self):
        self._make_loc("Westworld", self.region)
        sweetwater = self._make_loc("Sweetwater", self.town)
        las_mudas = self._make_loc("Las Mudas", self.town)
        rebuild_indicators(self.data_source_config._id)
        self.assertDataSourceAccurate(["Westworld", "Sweetwater", "Las Mudas"])

        # Insert new location
        since = self.pillow.get_change_feed().get_latest_offsets()
        self._make_loc("Blood Arroyo", self.town)

        # Change an existing location
        sweetwater.name = "Pariah"
        sweetwater.save()

        # Process both changes together and verify that they went through
        self.pillow.process_changes(since=since, forever=False)
        self.assertDataSourceAccurate(
            ["Westworld", "Pariah", "Las Mudas", "Blood Arroyo"])

        # Delete a location
        since = self.pillow.get_change_feed().get_latest_offsets()
        las_mudas.delete()
        self.pillow.process_changes(since=since, forever=False)
        self.assertDataSourceAccurate(["Westworld", "Pariah", "Blood Arroyo"])
def _build_report(self, vals, field='my_field', build_data_source=True):
    """
    Build a new report, and populate it with cases.

    Return a ConfigurableReportDataSource and a FieldColumn
    :param vals: List of values to populate the given report field with.
    :param field: The name of a field in the data source/report
    :param build_data_source: When True, rebuild and refresh the indicator table.
    :return: Tuple containing a ConfigurableReportDataSource and FieldColumn.
    The column is a column mapped to the given field.
    """
    # Create Cases; a None value means "create the case without the property".
    for v in vals:
        update_props = {field: v} if v is not None else {}
        self._new_case(update_props).save()

    # Create report
    data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name='foo',
        referenced_doc_type='CommCareCase',
        table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter={
            "type": "boolean_expression",
            "operator": "eq",
            "expression": {
                "type": "property_name",
                "property_name": "type"
            },
            "property_value": self.case_type,
        },
        configured_indicators=[{
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": field
            },
            "column_id": field,
            "display_name": field,
            "datatype": "string"
        }],
    )
    data_source_config.validate()
    data_source_config.save()
    self.addCleanup(data_source_config.delete)
    if build_data_source:
        tasks.rebuild_indicators(data_source_config._id)
        adapter = get_indicator_adapter(data_source_config)
        adapter.refresh_table()

    report_config = ReportConfiguration(
        domain=self.domain,
        config_id=data_source_config._id,
        title='foo',
        aggregation_columns=['doc_id'],
        columns=[{
            "type": "expanded",
            "field": field,
            "display": field,
            "format": "default",
        }],
        filters=[],
        configured_charts=[])
    report_config.save()
    self.addCleanup(report_config.delete)
    data_source = ReportFactory.from_spec(report_config)
    # The adapter was previously re-fetched and the table refreshed a second
    # time here; the refresh performed right after rebuild_indicators makes
    # that redundant, so the duplicate calls were removed.
    return data_source, data_source.top_level_columns[0]
class ReportDataTest(TestCase):
    """Basic UCR report queries (full fetch, limit, skip) over a SQL-backed
    case data source fed by the kafka UCR pillow."""

    dependent_apps = ['pillowtop']

    def setUp(self):
        # NOTE(review): this setUp does not call super().setUp(); confirm the
        # base TestCase's per-test setup is safe to skip here.
        # Create report
        self.domain = 'test-ucr-report-data'
        self.data_source = DataSourceConfiguration(
            domain=self.domain,
            referenced_doc_type='CommCareCase',
            table_id=uuid.uuid4().hex,
            configured_filter={},
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'name'
                    },
                    "column_id": 'name',
                    "display_name": 'name',
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'number'
                    },
                    "column_id": 'number',
                    "display_name": 'number',
                    "datatype": "integer"
                }
            ],
        )
        self.data_source.validate()
        self.data_source.save()
        IndicatorSqlAdapter(self.data_source).rebuild_table()
        self.addCleanup(self.data_source.delete)

        # initialize a report on the data
        self.report_config = ReportConfiguration(
            domain=self.domain,
            config_id=self.data_source._id,
            aggregation_columns=['doc_id'],
            columns=[
                {
                    "type": "field",
                    "field": "name",
                    "column_id": "name",
                    "display": "Name",
                    "aggregation": "simple",
                },
                {
                    "type": "field",
                    "field": "number",
                    "column_id": "number",
                    "display": "Number",
                    "aggregation": "simple",
                }
            ],
            filters=[],
            configured_charts=[]
        )
        self.report_config.save()
        self.addCleanup(self.report_config.delete)

    def _add_some_rows(self, count):
        # Insert ``count`` generated rows into the data source.
        rows = [ReportDataTestRow(uuid.uuid4().hex, i) for i in range(count)]
        self._add_rows(rows)
        return rows

    def _add_rows(self, rows):
        # Feed the rows through the kafka UCR pillow as case changes.
        pillow = get_kafka_ucr_pillow()
        pillow.bootstrap(configs=[self.data_source])

        def _get_case(row):
            # minimal case doc matching the data source's indicators
            return {
                '_id': uuid.uuid4().hex,
                'domain': self.domain,
                'doc_type': 'CommCareCase',
                'type': 'city',
                'name': row.name,
                'number': row.number,
            }

        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))

    def test_basic_query(self):
        # add a few rows to the data source
        rows = self._add_some_rows(3)
        # check the returned data from the report looks right
        report_data_source = ReportFactory.from_spec(self.report_config)
        report_data = report_data_source.get_data()
        self.assertEqual(len(rows), len(report_data))
        rows_by_name = {r.name: r for r in rows}
        for row in report_data:
            self.assertTrue(row['name'] in rows_by_name)
            self.assertEqual(rows_by_name[row['name']].number, row['number'])

    def test_limit(self):
        # limit=3 should return the first three of the full result set
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        limited_data = report_data_source.get_data(limit=3)
        self.assertEqual(3, len(limited_data))
        self.assertEqual(original_data[:3], limited_data)

    def test_skip(self):
        # start=3 should skip the first three rows
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        skipped = report_data_source.get_data(start=3)
        self.assertEqual(count - 3, len(skipped))
        self.assertEqual(original_data[3:], skipped)
def test_report_builder_datasource_deactivation(self):
    """Verify that cancelling a pro-with-report-builder subscription
    deactivates only builder-created data sources (not others), and that
    re-subscribing reactivates them."""

    def _get_data_source(id_):
        # Re-fetch fresh state from the DB; [0] is the config object.
        return get_datasource_config(id_, self.project.name)[0]

    # Upgrade the domain
    # (for the upgrade to work, there has to be an existing subscription,
    # which is why we subscribe to advanced first)
    self._subscribe_to_advanced()
    pro_with_rb_sub = self._subscribe_to_pro_with_rb()

    # Create reports and data sources
    builder_report_data_source = DataSourceConfiguration(
        domain=self.project.name,
        is_deactivated=False,
        referenced_doc_type="XFormInstance",
        table_id="foo",
    )
    other_data_source = DataSourceConfiguration(
        domain=self.project.name,
        is_deactivated=False,
        referenced_doc_type="XFormInstance",
        table_id="bar",
    )
    builder_report_data_source.save()
    other_data_source.save()
    # Only this report is marked as builder-created; its data source is the
    # one that should be toggled by subscription changes.
    report_builder_report = ReportConfiguration(
        domain=self.project.name,
        config_id=builder_report_data_source._id,
        report_meta=ReportMeta(created_by_builder=True),
    )
    report_builder_report.save()

    # downgrade the domain
    pro_with_rb_sub.cancel_subscription(web_user=self.admin_user.username)

    # Check that the builder data source is deactivated
    builder_report_data_source = _get_data_source(builder_report_data_source._id)
    self.assertTrue(builder_report_data_source.is_deactivated)

    # Check that the other data source has not been deactivated
    other_data_source = _get_data_source(other_data_source._id)
    self.assertFalse(other_data_source.is_deactivated)

    # upgrade the domain
    # (for the upgrade to work, there has to be an existing subscription,
    # which is why we subscribe to advanced first)
    self._subscribe_to_advanced()
    pro_with_rb_sub = self._subscribe_to_pro_with_rb()

    # check that the data source is activated
    builder_report_data_source = _get_data_source(builder_report_data_source._id)
    self.assertFalse(builder_report_data_source.is_deactivated)

    # delete the data sources
    builder_report_data_source.delete()
    other_data_source.delete()
    # Delete the report
    report_builder_report.delete()
    # reset the subscription
    pro_with_rb_sub.cancel_subscription(web_user=self.admin_user.username)
def _build_report(self, vals, field='my_field', build_data_source=True): """ Build a new report, and populate it with cases. Return a ConfigurableReportDataSource and a FieldColumn :param vals: List of values to populate the given report field with. :param field: The name of a field in the data source/report :return: Tuple containing a ConfigurableReportDataSource and FieldColumn. The column is a column mapped to the given field. """ # Create Cases for v in vals: update_props = {field: v} if v is not None else {} self._new_case(update_props).save() # Create report data_source_config = DataSourceConfiguration( domain=self.domain, display_name='foo', referenced_doc_type='CommCareCase', table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)), configured_filter={ "type": "boolean_expression", "operator": "eq", "expression": { "type": "property_name", "property_name": "type" }, "property_value": self.case_type, }, configured_indicators=[{ "type": "expression", "expression": { "type": "property_name", "property_name": field }, "column_id": field, "display_name": field, "datatype": "string" }], ) data_source_config.validate() data_source_config.save() self.addCleanup(data_source_config.delete) if build_data_source: tasks.rebuild_indicators(data_source_config._id) adapter = get_indicator_adapter(data_source_config) adapter.refresh_table() report_config = ReportConfiguration( domain=self.domain, config_id=data_source_config._id, title='foo', aggregation_columns=['doc_id'], columns=[{ "type": "expanded", "field": field, "display": field, "format": "default", }], filters=[], configured_charts=[] ) report_config.save() self.addCleanup(report_config.delete) data_source = ReportFactory.from_spec(report_config) adapter = get_indicator_adapter(data_source_config) if build_data_source: adapter.refresh_table() return data_source, data_source.top_level_columns[0]
class ReportDataTest(TestCase):
    """Tests of ConfigurableReportDataSource behavior: basic querying,
    expression columns, limit/skip, total rows, translation transforms,
    and sorting via a dedicated sort column."""

    def setUp(self):
        super(ReportDataTest, self).setUp()
        # Create report
        # Four indicators: name, number (int), the same number as a string
        # (for the translation-transform test), and a sort-only column.
        self.data_source = DataSourceConfiguration(
            domain=self.domain,
            referenced_doc_type='CommCareCase',
            table_id=uuid.uuid4().hex,
            configured_filter={},
            configured_indicators=[
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'name'
                    },
                    "column_id": 'name',
                    "display_name": 'name',
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'number'
                    },
                    "column_id": 'number',
                    "display_name": 'number',
                    "datatype": "integer"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'number'
                    },
                    "column_id": 'string-number',
                    "display_name": 'string-number',
                    "datatype": "string"
                },
                {
                    "type": "expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": 'just_for_sorting'
                    },
                    "column_id": 'just_for_sorting',
                    "display_name": 'just_for_sorting',
                    "datatype": "string"
                }
            ],
        )
        self.data_source.validate()
        self.data_source.save()
        self.adapter = get_indicator_adapter(self.data_source)
        self.adapter.rebuild_table()
        self.addCleanup(self.data_source.delete)

        # initialize a report on the data
        self.report_config = ReportConfiguration(
            domain=self.domain,
            config_id=self.data_source._id,
            aggregation_columns=['doc_id'],
            columns=[
                {
                    "type": "field",
                    "field": "name",
                    "column_id": "name",
                    "display": "Name",
                    "aggregation": "simple",
                },
                {
                    "type": "field",
                    "field": "number",
                    "column_id": "number",
                    "display": "Number",
                    "aggregation": "simple",
                    # Summed into the total row (see test_total_row).
                    "calculate_total": True,
                },
                {
                    # Constant expression column: always 10.
                    "type": "expression",
                    "column_id": "ten",
                    "display": "The Number Ten",
                    "expression": {
                        'type': 'constant',
                        'constant': 10,
                    }
                },
                {
                    # Evaluator column: number * the 'ten' column above.
                    "type": "expression",
                    "column_id": "by_tens",
                    "display": "Counting by tens",
                    "expression": {
                        "type": "evaluator",
                        "statement": "a * b",
                        "context_variables": {
                            "a": {
                                "type": "property_name",
                                "property_name": "number",
                            },
                            "b": {
                                "type": "property_name",
                                "property_name": "ten",
                            }
                        }
                    }
                },
                {
                    # Translation transform: maps "0"/"1"/"2" to words,
                    # leaving unmapped values untouched.
                    "type": "field",
                    "field": 'string-number',
                    "display": 'Display Number',
                    "aggregation": "simple",
                    "transform": {
                        "type": "translation",
                        "translations": {
                            "0": "zero",
                            "1": {"en": "one", "es": "uno"},
                            "2": {"en": "two", "es": "dos"}
                        },
                    },
                }
            ],
            filters=[],
            configured_charts=[],
            sort_expression=[{'field': 'just_for_sorting', 'order': 'DESC'}]
        )
        self.report_config.save()
        self.addCleanup(self.report_config.delete)

    def _add_some_rows(self, count):
        # Rows carry (name, number, sort_key); sort_key mirrors number here.
        rows = [ReportDataTestRow(uuid.uuid4().hex, i, i) for i in range(count)]
        self._add_rows(rows)
        return rows

    def _add_rows(self, rows):
        # Feed synthetic case docs through the case pillow configured with
        # this test's data source.
        pillow = get_case_pillow(ucr_configs=[self.data_source])

        def _get_case(row):
            return {
                '_id': uuid.uuid4().hex,
                'domain': self.domain,
                'doc_type': 'CommCareCase',
                'type': 'city',
                'name': row.name,
                'number': row.number,
                'just_for_sorting': row.sort_key,
            }

        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))

    def test_basic_query(self):
        # add a few rows to the data source
        rows = self._add_some_rows(3)

        # check the returned data from the report looks right
        report_data_source = ConfigurableReportDataSource.from_spec(self.report_config)
        report_data = report_data_source.get_data()
        self.assertEqual(len(rows), len(report_data))
        rows_by_name = {r.name: r for r in rows}
        for row in report_data:
            self.assertTrue(row['name'] in rows_by_name)
            self.assertEqual(rows_by_name[row['name']].number, row['number'])
            self.assertEqual(10, row['ten'])
            self.assertEqual(10 * row['number'], row['by_tens'])

    def test_limit(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ConfigurableReportDataSource.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        limited_data = report_data_source.get_data(limit=3)
        self.assertEqual(3, len(limited_data))
        self.assertEqual(original_data[:3], limited_data)

    def test_skip(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ConfigurableReportDataSource.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        skipped = report_data_source.get_data(start=3)
        self.assertEqual(count - 3, len(skipped))
        self.assertEqual(original_data[3:], skipped)

    def test_total_row(self):
        # Only the 'number' column has calculate_total=True; the other four
        # positions in the total row are the label and empty strings.
        rows = self._add_some_rows(3)
        report_data_source = ConfigurableReportDataSource.from_spec(self.report_config)
        total_number = sum(row.number for row in rows)
        self.assertEqual(report_data_source.get_total_row(),
                         ['Total', total_number, '', '', ''])

    def test_transform(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ConfigurableReportDataSource.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        rows_by_number = {int(row['number']): row for row in original_data}
        # Make sure the translations happened
        self.assertEqual(rows_by_number[0]['string-number'], "zero")
        self.assertEqual(rows_by_number[1]['string-number'], "one")
        self.assertEqual(rows_by_number[2]['string-number'], "two")
        # These last two are untranslated
        self.assertEqual(rows_by_number[3]['string-number'], "3")
        self.assertEqual(rows_by_number[4]['string-number'], "4")
def _build_report_and_view(self):
    """Build and save a data source plus a report (a simple field column and
    a percent column), rebuild the indicators, and return the report config
    together with a ConfigurableReportView wired to it.

    :return: Tuple of (ReportConfiguration, ConfigurableReportView).
    """
    # Create report
    data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name='foo',
        referenced_doc_type='CommCareCase',
        table_id="woop_woop",
        # Only cases of this test's case type feed the table.
        configured_filter={
            "type": "boolean_expression",
            "operator": "eq",
            "expression": {
                "type": "property_name",
                "property_name": "type"
            },
            "property_value": self.case_type,
        },
        configured_indicators=[
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'fruit'
                },
                "column_id": 'indicator_col_id_fruit',
                "display_name": 'indicator_display_name_fruit',
                "datatype": "string"
            },
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'num1'
                },
                "column_id": 'indicator_col_id_num1',
                "datatype": "integer"
            },
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'num2'
                },
                "column_id": 'indicator_col_id_num2',
                "datatype": "integer"
            },
        ],
    )
    data_source_config.validate()
    data_source_config.save()
    self.addCleanup(data_source_config.delete)
    # NOTE(review): unlike the sibling builder elsewhere in this suite, no
    # adapter.refresh_table() follows the rebuild here — confirm intentional.
    tasks.rebuild_indicators(data_source_config._id)

    report_config = ReportConfiguration(
        domain=self.domain,
        config_id=data_source_config._id,
        title='foo',
        aggregation_columns=['doc_id'],
        columns=[
            {
                "type": "field",
                "display": "report_column_display_fruit",
                "field": 'indicator_col_id_fruit',
                'column_id': 'report_column_col_id_fruit',
                'aggregation': 'simple'
            },
            {
                # Percent column: sum(num2) / sum(num1).
                "type": "percent",
                "display": "report_column_display_percent",
                'column_id': 'report_column_col_id_percent',
                'format': 'percent',
                "denominator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num1",
                    "column_id": "report_column_col_id_percent_num1"
                },
                "numerator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num2",
                    "column_id": "report_column_col_id_percent_num2"
                }
            },
        ],
    )
    report_config.save()
    self.addCleanup(report_config.delete)

    # Wire up a view directly (bypassing URL routing) for the test.
    view = ConfigurableReportView(request=HttpRequest())
    view._domain = self.domain
    view._lang = "en"
    view._report_config_id = report_config._id

    return report_config, view
class ReportDataTest(TestCase):
    """Tests of report querying (basic, limit, skip) including expression
    columns, run against all UCR backends via @run_with_all_ucr_backends."""

    def setUp(self):
        super(ReportDataTest, self).setUp()
        # Create report
        self.data_source = DataSourceConfiguration(
            domain=self.domain,
            referenced_doc_type='CommCareCase',
            table_id=uuid.uuid4().hex,
            configured_filter={},  # empty filter: every case doc matches
            configured_indicators=[{
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'name'
                },
                "column_id": 'name',
                "display_name": 'name',
                "datatype": "string"
            }, {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'number'
                },
                "column_id": 'number',
                "display_name": 'number',
                "datatype": "integer"
            }],
        )
        self.data_source.validate()
        self.data_source.save()
        self.adapter = get_indicator_adapter(self.data_source)
        self.adapter.rebuild_table()
        self.addCleanup(self.data_source.delete)

        # initialize a report on the data
        self.report_config = ReportConfiguration(
            domain=self.domain,
            config_id=self.data_source._id,
            aggregation_columns=['doc_id'],
            columns=[{
                "type": "field",
                "field": "name",
                "column_id": "name",
                "display": "Name",
                "aggregation": "simple",
            }, {
                "type": "field",
                "field": "number",
                "column_id": "number",
                "display": "Number",
                "aggregation": "simple",
            }, {
                # Constant expression column: always 10.
                "type": "expression",
                "column_id": "ten",
                "display": "The Number Ten",
                "expression": {
                    'type': 'constant',
                    'constant': 10,
                }
            }, {
                # Evaluator column: number * the 'ten' column above.
                "type": "expression",
                "column_id": "by_tens",
                "display": "Counting by tens",
                "expression": {
                    "type": "evaluator",
                    "statement": "a * b",
                    "context_variables": {
                        "a": {
                            "type": "property_name",
                            "property_name": "number",
                        },
                        "b": {
                            "type": "property_name",
                            "property_name": "ten",
                        }
                    }
                }
            }],
            filters=[],
            configured_charts=[])
        self.report_config.save()
        self.addCleanup(self.report_config.delete)

    def _add_some_rows(self, count):
        rows = [ReportDataTestRow(uuid.uuid4().hex, i) for i in range(count)]
        self._add_rows(rows)
        # Refresh so the freshly processed rows are visible to queries.
        self.adapter.refresh_table()
        return rows

    def _add_rows(self, rows):
        # Route synthetic case changes through the kafka UCR pillow.
        pillow = get_kafka_ucr_pillow()
        pillow.bootstrap(configs=[self.data_source])

        def _get_case(row):
            return {
                '_id': uuid.uuid4().hex,
                'domain': self.domain,
                'doc_type': 'CommCareCase',
                'type': 'city',
                'name': row.name,
                'number': row.number,
            }

        for row in rows:
            pillow.process_change(doc_to_change(_get_case(row)))

    @run_with_all_ucr_backends
    def test_basic_query(self):
        # add a few rows to the data source
        rows = self._add_some_rows(3)

        # check the returned data from the report looks right
        report_data_source = ReportFactory.from_spec(self.report_config)
        report_data = report_data_source.get_data()
        self.assertEqual(len(rows), len(report_data))
        rows_by_name = {r.name: r for r in rows}
        for row in report_data:
            self.assertTrue(row['name'] in rows_by_name)
            self.assertEqual(rows_by_name[row['name']].number, row['number'])
            self.assertEqual(10, row['ten'])
            self.assertEqual(10 * row['number'], row['by_tens'])

    @run_with_all_ucr_backends
    def test_limit(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        limited_data = report_data_source.get_data(limit=3)
        self.assertEqual(3, len(limited_data))
        self.assertEqual(original_data[:3], limited_data)

    @run_with_all_ucr_backends
    def test_skip(self):
        count = 5
        self._add_some_rows(count)
        report_data_source = ReportFactory.from_spec(self.report_config)
        original_data = report_data_source.get_data()
        self.assertEqual(count, len(original_data))
        skipped = report_data_source.get_data(start=3)
        self.assertEqual(count - 3, len(skipped))
        self.assertEqual(original_data[3:], skipped)
def test_report_builder_datasource_deactivation(self):
    """Verify that downgrading a pro-with-report-builder plan (via
    change_plan to the default/community plan) deactivates only
    builder-created data sources, and that upgrading reactivates them."""

    def _get_data_source(id_):
        # Re-fetch fresh state from the DB; [0] is the config object.
        return get_datasource_config(id_, self.project.name)[0]

    # Upgrade the domain
    # (for the upgrade to work, there has to be an existing subscription,
    # which is why we subscribe to advanced first)
    self._subscribe_to_advanced()
    pro_with_rb_sub = self._subscribe_to_pro_with_rb()

    # Create reports and data sources
    builder_report_data_source = DataSourceConfiguration(
        domain=self.project.name,
        is_deactivated=False,
        referenced_doc_type="XFormInstance",
        table_id="foo",
    )
    other_data_source = DataSourceConfiguration(
        domain=self.project.name,
        is_deactivated=False,
        referenced_doc_type="XFormInstance",
        table_id="bar",
    )
    builder_report_data_source.save()
    other_data_source.save()
    # Only this report is builder-created; its data source is the one that
    # should be toggled by the plan changes below.
    report_builder_report = ReportConfiguration(
        domain=self.project.name,
        config_id=builder_report_data_source._id,
        report_meta=ReportMeta(created_by_builder=True),
    )
    report_builder_report.save()

    # downgrade the domain
    community_sub = pro_with_rb_sub.change_plan(
        DefaultProductPlan.get_default_plan_version())

    # Check that the builder data source is deactivated
    builder_report_data_source = _get_data_source(
        builder_report_data_source._id)
    self.assertTrue(builder_report_data_source.is_deactivated)

    # Check that the other data source has not been deactivated
    other_data_source = _get_data_source(other_data_source._id)
    self.assertFalse(other_data_source.is_deactivated)

    # upgrade the domain
    # (for the upgrade to work, there has to be an existing subscription,
    # which is why we subscribe to advanced first)
    community_sub.change_plan(
        DefaultProductPlan.get_default_plan_version(
            edition=SoftwarePlanEdition.ADVANCED))
    pro_with_rb_sub = self._subscribe_to_pro_with_rb()

    # check that the data source is activated
    builder_report_data_source = _get_data_source(
        builder_report_data_source._id)
    self.assertFalse(builder_report_data_source.is_deactivated)

    # delete the data sources
    builder_report_data_source.delete()
    other_data_source.delete()
    # Delete the report
    report_builder_report.delete()
    # reset the subscription
    pro_with_rb_sub.change_plan(
        DefaultProductPlan.get_default_plan_version())
class TestLocationDataSource(TestCase): domain = "delos_corp" def setUp(self): delete_all_locations() self.domain_obj = create_domain(self.domain) self.region = LocationType.objects.create(domain=self.domain, name="region") self.town = LocationType.objects.create(domain=self.domain, name="town", parent_type=self.region) self.data_source_config = DataSourceConfiguration( domain=self.domain, display_name='Locations in Westworld', referenced_doc_type='Location', table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)), configured_filter={}, configured_indicators=[{ "type": "expression", "expression": { "type": "property_name", "property_name": "name" }, "column_id": "location_name", "display_name": "location_name", "datatype": "string" }], ) self.data_source_config.validate() self.data_source_config.save() self.pillow = get_kafka_ucr_pillow() self.pillow.bootstrap(configs=[self.data_source_config]) with trap_extra_setup(KafkaUnavailableError): self.pillow.get_change_feed().get_current_offsets() def tearDown(self): self.domain_obj.delete() delete_all_locations() self.data_source_config.delete() def _make_loc(self, name, location_type): return SQLLocation.objects.create( domain=self.domain, name=name, site_code=name, location_type=location_type) def assertDataSourceAccurate(self, expected_locations): adapter = get_indicator_adapter(self.data_source_config) query = adapter.get_query_object() adapter.refresh_table() data_source = query.all() self.assertItemsEqual( expected_locations, [row[-1] for row in data_source] ) def test_location_data_source(self): self._make_loc("Westworld", self.region) sweetwater = self._make_loc("Sweetwater", self.town) las_mudas = self._make_loc("Las Mudas", self.town) rebuild_indicators(self.data_source_config._id) self.assertDataSourceAccurate(["Westworld", "Sweetwater", "Las Mudas"]) # Insert new location since = self.pillow.get_change_feed().get_current_offsets() self._make_loc("Blood Arroyo", self.town) # Change an existing location 
sweetwater.name = "Pariah" sweetwater.save() # Process both changes together and verify that they went through self.pillow.process_changes(since=since, forever=False) self.assertDataSourceAccurate(["Westworld", "Pariah", "Las Mudas", "Blood Arroyo"]) # Delete a location since = self.pillow.get_change_feed().get_current_offsets() las_mudas.delete() self.pillow.process_changes(since=since, forever=False) # No actual change - deletions are not yet processed self.assertDataSourceAccurate(["Westworld", "Pariah", "Las Mudas", "Blood Arroyo"])
def _build_report_and_view(self):
    """Build and save a data source plus a report (a simple field column and
    a percent column), rebuild and refresh the indicator table, and return
    the report config together with a ConfigurableReport view wired to it.

    :return: Tuple of (ReportConfiguration, ConfigurableReport).
    """
    # Create report
    data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name='foo',
        referenced_doc_type='CommCareCase',
        table_id="woop_woop",
        # Only cases of this test's case type feed the table.
        configured_filter={
            "type": "boolean_expression",
            "operator": "eq",
            "expression": {
                "type": "property_name",
                "property_name": "type"
            },
            "property_value": self.case_type,
        },
        configured_indicators=[
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'fruit'
                },
                "column_id": 'indicator_col_id_fruit',
                "display_name": 'indicator_display_name_fruit',
                "datatype": "string"
            },
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'num1'
                },
                "column_id": 'indicator_col_id_num1',
                "datatype": "integer"
            },
            {
                "type": "expression",
                "expression": {
                    "type": "property_name",
                    "property_name": 'num2'
                },
                "column_id": 'indicator_col_id_num2',
                "datatype": "integer"
            },
        ],
    )
    data_source_config.validate()
    data_source_config.save()
    self.addCleanup(data_source_config.delete)
    tasks.rebuild_indicators(data_source_config._id)
    adapter = get_indicator_adapter(data_source_config)
    adapter.refresh_table()

    report_config = ReportConfiguration(
        domain=self.domain,
        config_id=data_source_config._id,
        title='foo',
        aggregation_columns=['doc_id'],
        columns=[
            {
                "type": "field",
                "display": "report_column_display_fruit",
                "field": 'indicator_col_id_fruit',
                'column_id': 'report_column_col_id_fruit',
                'aggregation': 'simple'
            },
            {
                # Percent column: sum(num2) / sum(num1).
                "type": "percent",
                "display": "report_column_display_percent",
                'column_id': 'report_column_col_id_percent',
                'format': 'percent',
                "denominator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num1",
                    "column_id": "report_column_col_id_percent_num1"
                },
                "numerator": {
                    "type": "field",
                    "aggregation": "sum",
                    "field": "indicator_col_id_num2",
                    "column_id": "report_column_col_id_percent_num2"
                }
            },
        ],
    )
    report_config.save()
    self.addCleanup(report_config.delete)

    # Wire up a view directly (bypassing URL routing) for the test.
    view = ConfigurableReport(request=HttpRequest())
    view._domain = self.domain
    view._lang = "en"
    view._report_config_id = report_config._id
    return report_config, view