def test_report_data_source(self):
    """End-to-end check that a report reads only from its own indicator data source.

    Two report configs share one template but point at different data sources
    (``self.ds_1`` / ``self.ds_2``); docs saved to one adapter must only show
    up in the corresponding report.
    """
    # bootstrap report data sources against indicator data sources
    report_config_template = get_sample_report_config()
    report_config_1 = ReportConfiguration.wrap(report_config_template.to_json())
    report_config_1.config_id = self.ds_1._id
    report_config_2 = ReportConfiguration.wrap(report_config_template.to_json())
    report_config_2.config_id = self.ds_2._id
    # save a few docs to ds 1 — fresh _id each time so each save is a new row
    sample_doc, _ = get_sample_doc_and_indicators()
    num_docs = 3
    for i in range(num_docs):
        sample_doc['_id'] = uuid.uuid4().hex
        self.ds1_adapter.save(sample_doc)
    # ds 1 should have data, ds2 should not
    # NOTE(review): the report appears to aggregate to a single row with a
    # 'count' column — confirm against get_sample_report_config
    ds1_rows = ConfigurableReportDataSource.from_spec(report_config_1).get_data()
    self.assertEqual(1, len(ds1_rows))
    self.assertEqual(num_docs, ds1_rows[0]['count'])
    ds2_rows = ConfigurableReportDataSource.from_spec(report_config_2).get_data()
    self.assertEqual(0, len(ds2_rows), ds2_rows)
    # save one doc to ds 2
    sample_doc['_id'] = uuid.uuid4().hex
    self.ds2_adapter.save(sample_doc)
    # ds 1 should still have same data, ds2 should now have one row
    ds1_rows = ConfigurableReportDataSource.from_spec(report_config_1).get_data()
    self.assertEqual(1, len(ds1_rows))
    self.assertEqual(num_docs, ds1_rows[0]['count'])
    ds2_rows = ConfigurableReportDataSource.from_spec(report_config_2).get_data()
    self.assertEqual(1, len(ds2_rows))
    self.assertEqual(1, ds2_rows[0]['count'])
def validate(self, required=True):
    """Validate the report configuration.

    Runs the base document validation, rejects duplicate filter slugs and
    duplicate column ids (the factory chokes on them with a worse error),
    then triggers the implicit validation done by building the data source,
    UI filters, charts and sort order.

    :param required: passed through to the base ``validate``.
    :raises BadSpecError: when duplicate slugs/column_ids are found.
    """
    from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource

    def _check_for_duplicates(supposedly_unique_list, error_msg):
        # http://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-python-list
        # idiomatic set comprehension instead of set([listcomp])
        duplicate_items = {
            item for item in supposedly_unique_list
            if supposedly_unique_list.count(item) > 1
        }
        if duplicate_items:  # truthiness instead of len(...) > 0
            raise BadSpecError(
                _(error_msg).format(', '.join(sorted(duplicate_items))))

    super(ReportConfiguration, self).validate(required)
    # check duplicates before passing to factory since it chokes on them
    _check_for_duplicates(
        [FilterSpec.wrap(f).slug for f in self.filters],
        'Filters cannot contain duplicate slugs: {}',
    )
    _check_for_duplicates(
        [
            column_id
            for c in self.report_columns
            for column_id in c.get_column_ids()
        ],
        'Columns cannot contain duplicate column_ids: {}',
    )
    # these calls all implicitly do validation
    ConfigurableReportDataSource.from_spec(self)
    self.ui_filters
    self.charts
    self.sort_order
def validate(self, required=True):
    """Validate the report configuration.

    Performs base validation, raises on duplicate filter slugs or column ids
    (checked up front because the factory produces confusing errors for them),
    and exercises the properties whose construction implicitly validates.

    :param required: forwarded to the superclass ``validate``.
    :raises BadSpecError: on duplicate filter slugs or column ids.
    """
    from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource

    def _check_for_duplicates(supposedly_unique_list, error_msg):
        # http://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-python-list
        # set comprehension is the idiomatic form of set([listcomp])
        duplicate_items = {
            item for item in supposedly_unique_list
            if supposedly_unique_list.count(item) > 1
        }
        if duplicate_items:  # empty set is falsy; no need for len(...) > 0
            raise BadSpecError(
                _(error_msg).format(', '.join(sorted(duplicate_items)))
            )

    super(ReportConfiguration, self).validate(required)
    # check duplicates before passing to factory since it chokes on them
    _check_for_duplicates(
        [FilterSpec.wrap(f).slug for f in self.filters],
        'Filters cannot contain duplicate slugs: {}',
    )
    _check_for_duplicates(
        [column_id for c in self.report_columns for column_id in c.get_column_ids()],
        'Columns cannot contain duplicate column_ids: {}',
    )
    # these calls all implicitly do validation
    ConfigurableReportDataSource.from_spec(self)
    self.ui_filters
    self.charts
    self.sort_order
def test_report_data_source(self):
    """End-to-end check that a report reads only from its own indicator data source.

    Two report configs built from one template point at different data
    sources; documents saved through one adapter must only be visible in the
    matching report.
    """
    # bootstrap report data sources against indicator data sources
    report_config_template = get_sample_report_config()
    report_config_1 = ReportConfiguration.wrap(report_config_template.to_json())
    report_config_1.config_id = self.ds_1._id
    report_config_2 = ReportConfiguration.wrap(report_config_template.to_json())
    report_config_2.config_id = self.ds_2._id
    # save a few docs to ds 1 — fresh _id each iteration so each save inserts a new row
    sample_doc, _ = get_sample_doc_and_indicators()
    num_docs = 3
    for i in range(num_docs):
        sample_doc['_id'] = uuid.uuid4().hex
        self.ds1_adapter.save(sample_doc)
    # ds 1 should have data, ds2 should not
    ds1_rows = ConfigurableReportDataSource.from_spec(report_config_1).get_data()
    self.assertEqual(1, len(ds1_rows))
    self.assertEqual(num_docs, ds1_rows[0]['count'])
    ds2_rows = ConfigurableReportDataSource.from_spec(report_config_2).get_data()
    # pass the rows as the failure message for easier debugging (consistent
    # with the sibling version of this test)
    self.assertEqual(0, len(ds2_rows), ds2_rows)
    # save one doc to ds 2
    sample_doc['_id'] = uuid.uuid4().hex
    self.ds2_adapter.save(sample_doc)
    # ds 1 should still have same data, ds2 should now have one row
    ds1_rows = ConfigurableReportDataSource.from_spec(report_config_1).get_data()
    self.assertEqual(1, len(ds1_rows))
    self.assertEqual(num_docs, ds1_rows[0]['count'])
    ds2_rows = ConfigurableReportDataSource.from_spec(report_config_2).get_data()
    self.assertEqual(1, len(ds2_rows))
    self.assertEqual(1, ds2_rows[0]['count'])
def _run_report(spec, engine_id=None):
    """Run the report described by ``spec`` and return a datatables-style payload.

    NOTE(review): ``filter_values``, ``sort_column``, ``sort_order`` and
    ``params`` are free variables here, so this must be a closure defined
    inside an enclosing function that binds them — confirm at the call site.

    :param spec: report configuration to run.
    :param engine_id: optional DB engine override.
    :return: dict with 'aaData' (rows), 'iTotalRecords', and optionally 'total_row'.
    """
    data_source = ConfigurableReportDataSource.from_spec(spec, include_prefilters=True)
    if engine_id:
        # NOTE(review): reaches through to the inner data source object; a
        # sibling version of this helper calls override_engine_id on the
        # report data source directly — confirm which object owns the method.
        data_source.data_source.override_engine_id(engine_id)
    data_source.set_filter_values(filter_values)
    if sort_column:
        # sort_column indexes into top_level_columns; sort_order is
        # presumably 'asc'/'desc' — upper-cased for the SQL layer
        data_source.set_order_by(
            [(data_source.top_level_columns[int(sort_column)].column_id, sort_order.upper())]
        )
    if params:
        # datatables-style pagination parameters
        datatables_params = DatatablesParams.from_request_dict(params)
        start = datatables_params.start
        limit = datatables_params.count
    else:
        start, limit = None, None
    page = list(data_source.get_data(start=start, limit=limit))
    total_records = data_source.get_total_records()
    json_response = {
        'aaData': page,
        "iTotalRecords": total_records,
    }
    total_row = data_source.get_total_row() if data_source.has_total_row else None
    if total_row is not None:
        json_response["total_row"] = total_row
    return json_response
def _get_report_data(self, report_config, domain, start, limit, get_params):
    """Return one page of report rows, the column metadata, and the total row count."""
    report = ConfigurableReportDataSource.from_spec(
        report_config, include_prefilters=True)
    # filters whose datatype is (or defaults to) "string" keep their raw GET values
    string_type_params = [
        ui_filter.name
        for ui_filter in report_config.ui_filters
        if getattr(ui_filter, 'datatype', 'string') == "string"
    ]
    filter_values = get_filter_values(
        report_config.ui_filters,
        query_dict_to_dict(get_params, domain, string_type_params))
    report.set_filter_values(filter_values)

    page = list(report.get_data(start=start, limit=limit))

    columns = []
    for column in report.columns:
        simple_column = {"header": column.header, "slug": column.slug}
        # expanded subcolumns also expose the value they were expanded from
        if isinstance(column, UCRExpandDatabaseSubcolumn):
            simple_column['expand_column_value'] = column.expand_value
        columns.append(simple_column)

    total_records = report.get_total_records()
    return page, columns, total_records
def _get_report_data(self, report_config, domain, start, limit, get_params):
    """Fetch a page of report rows plus column descriptors and the total record count."""
    report = ConfigurableReportDataSource.from_spec(report_config)
    # names of UI filters whose datatype is (or defaults to) "string"
    string_type_params = [
        ui_filter.name
        for ui_filter in report_config.ui_filters
        if getattr(ui_filter, 'datatype', 'string') == "string"
    ]
    query_dict = query_dict_to_dict(get_params, domain, string_type_params)
    report.set_filter_values(get_filter_values(report_config.ui_filters, query_dict))

    page = list(report.get_data(start=start, limit=limit))

    columns = []
    for column in report.columns:
        descriptor = {
            "header": column.header,
            "slug": column.slug,
        }
        # expanded subcolumns additionally carry the value they expand
        if isinstance(column, UCRExpandDatabaseSubcolumn):
            descriptor['expand_column_value'] = column.expand_value
        columns.append(descriptor)

    return page, columns, report.get_total_records()
def _run_report(spec, engine_id=None):
    """Run the report described by ``spec`` and return a datatables-style payload.

    NOTE(review): ``filter_values``, ``sort_column``, ``sort_order`` and
    ``params`` are free variables, so this must be a closure defined inside a
    function that binds them — verify at the definition site.

    :param spec: report configuration to run.
    :param engine_id: optional DB engine override.
    :return: dict with 'aaData' (rows), 'iTotalRecords', and optionally 'total_row'.
    """
    data_source = ConfigurableReportDataSource.from_spec(spec, include_prefilters=True)
    if engine_id:
        # NOTE(review): a sibling version calls override_engine_id on
        # data_source.data_source instead — confirm which is correct.
        data_source.override_engine_id(engine_id)
    data_source.set_filter_values(filter_values)
    if sort_column:
        # sort_column indexes into top_level_columns; sort_order presumably
        # 'asc'/'desc', upper-cased for the SQL layer
        data_source.set_order_by(
            [(data_source.top_level_columns[int(sort_column)].column_id, sort_order.upper())]
        )
    if params:
        # datatables pagination parameters
        datatables_params = DatatablesParams.from_request_dict(params)
        start = datatables_params.start
        limit = datatables_params.count
    else:
        start, limit = None, None
    page = list(data_source.get_data(start=start, limit=limit))
    total_records = data_source.get_total_records()
    json_response = {
        'aaData': page,
        "iTotalRecords": total_records,
    }
    total_row = data_source.get_total_row() if data_source.has_total_row else None
    if total_row is not None:
        json_response["total_row"] = total_row
    return json_response
def data_source(self):
    """Build the report data source with filters and sort order applied."""
    from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource
    source = ConfigurableReportDataSource.from_spec(
        self.report_config, include_prefilters=True)
    source.lang = self.lang
    source.set_filter_values(self.filter_values)
    # translate the configured sort expression into (field, order) pairs
    sort_pairs = [
        (clause['field'], clause['order'])
        for clause in self.report_config.sort_expression
    ]
    source.set_order_by(sort_pairs)
    return source
def test_total_row(self):
    """The total row sums the numeric column and leaves the others blank."""
    inserted = self._add_some_rows(3)
    data_source = ConfigurableReportDataSource.from_spec(
        self.report_config)
    expected_total = sum(row.number for row in inserted)
    self.assertEqual(
        data_source.get_total_row(),
        ['Total', expected_total, '', '', ''])
def from_spec(cls, spec):
    """Alternate constructor: build a ConfigurableReportDataSource from a report spec."""
    from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource
    wrapped_filters = [ReportFilter.wrap(f) for f in spec.filters]
    return ConfigurableReportDataSource(
        domain=spec.domain,
        config_or_config_id=spec.config_id,
        filters=wrapped_filters,
        aggregation_columns=spec.aggregation_columns,
        columns=spec.report_columns,
    )
def get_ucr_data(report_config, date_filter, date_span):
    """Return UCR report rows, restricted to ``date_span`` when a date filter is given."""
    from corehq.apps.userreports.reports.view import get_filter_values
    data_source = ConfigurableReportDataSource.from_spec(report_config, include_prefilters=True)
    if date_filter:
        filter_params = get_date_params(date_filter['slug'], date_span)
    else:
        filter_params = {}
    filter_values = get_filter_values(report_config.ui_filters, filter_params)
    data_source.set_filter_values(filter_values)
    return data_source.get_data()
def test_skip(self):
    """``start`` skips that many rows and returns the remaining tail in order."""
    count = 5
    self._add_some_rows(count)
    data_source = ConfigurableReportDataSource.from_spec(self.report_config)
    full_data = data_source.get_data()
    self.assertEqual(count, len(full_data))
    tail = data_source.get_data(start=3)
    self.assertEqual(count - 3, len(tail))
    self.assertEqual(full_data[3:], tail)
def test_limit(self):
    """``limit`` caps the row count and returns the leading prefix in order."""
    count = 5
    self._add_some_rows(count)
    data_source = ConfigurableReportDataSource.from_spec(self.report_config)
    full_data = data_source.get_data()
    self.assertEqual(count, len(full_data))
    prefix = data_source.get_data(limit=3)
    self.assertEqual(3, len(prefix))
    self.assertEqual(full_data[:3], prefix)
def get_ucr_data(report_config, date_filter, date_span):
    """Fetch UCR report data, applying date params when a date filter is configured."""
    from corehq.apps.userreports.reports.view import get_filter_values
    data_source = ConfigurableReportDataSource.from_spec(report_config, include_prefilters=True)
    filter_params = {} if not date_filter else get_date_params(date_filter['slug'], date_span)
    data_source.set_filter_values(get_filter_values(report_config.ui_filters, filter_params))
    return data_source.get_data()
def from_spec(cls, spec, include_prefilters=False):
    """Alternate constructor from a report spec; optionally keep prefilters."""
    from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource
    # translate the configured sort expression into (field, order) pairs
    order_by = [(clause['field'], clause['order']) for clause in spec.sort_expression]
    if include_prefilters:
        filter_specs = spec.filters
    else:
        filter_specs = spec.filters_without_prefilters
    return ConfigurableReportDataSource(
        domain=spec.domain,
        config_or_config_id=spec.config_id,
        filters=[ReportFilter.wrap(f) for f in filter_specs],
        aggregation_columns=spec.aggregation_columns,
        columns=spec.report_columns,
        order_by=order_by,
    )
def test_transform(self):
    """The number→word transform covers 0–2; larger values stay as digit strings."""
    count = 5
    self._add_some_rows(count)
    data_source = ConfigurableReportDataSource.from_spec(self.report_config)
    data = data_source.get_data()
    self.assertEqual(count, len(data))
    by_number = {int(row['number']): row for row in data}
    # 0-2 are translated; 3 and 4 fall through untranslated
    expected = {0: "zero", 1: "one", 2: "two", 3: "3", 4: "4"}
    for number, word in expected.items():
        self.assertEqual(by_number[number]['string-number'], word)
def test_basic_query(self):
    """Every inserted row comes back with its derived columns computed."""
    # add a few rows to the data source
    inserted = self._add_some_rows(3)
    # check the returned data from the report looks right
    data_source = ConfigurableReportDataSource.from_spec(self.report_config)
    report_data = data_source.get_data()
    self.assertEqual(len(inserted), len(report_data))
    by_name = {r.name: r for r in inserted}
    for row in report_data:
        self.assertIn(row['name'], by_name)
        self.assertEqual(by_name[row['name']].number, row['number'])
        # 'ten' is a constant column; 'by_tens' scales the number
        self.assertEqual(10, row['ten'])
        self.assertEqual(10 * row['number'], row['by_tens'])
def valid_ucr_date_column(ucr, column_name):
    """Return True iff every row of the UCR has a '%Y-%m-%d' date string in ``column_name``.

    :param ucr: report configuration to run.
    :param column_name: column whose values must all be ISO-style date strings.
    :return: False on the first non-string or unparseable value, else True.
    """
    data_source = ConfigurableReportDataSource.from_spec(ucr, include_prefilters=True)
    for datum in data_source.get_data():
        value = datum.get(column_name)
        # isinstance is the idiomatic type check; also keeps the type test
        # out of the try block, which should only guard strptime
        if not isinstance(value, str):
            return False
        try:
            datetime.strptime(value, "%Y-%m-%d")
        except ValueError:
            return False
    return True
def test_group_by_missing_from_columns(self):
    """Aggregation columns absent from the column list still appear in group_by."""
    report_config = ReportConfiguration(
        domain='somedomain',
        config_id='someconfig',
        aggregation_columns=['doc_id'],
        columns=[{
            "type": "field",
            "field": "somefield",
            "format": "default",
            "aggregation": "sum",
        }],
        filters=[],
        configured_charts=[],
    )
    data_source = ConfigurableReportDataSource.from_spec(report_config)
    with mock_datasource_config():
        self.assertEqual(['doc_id'], data_source.group_by)
def test_group_by_missing_from_columns(self):
    """group_by includes aggregation columns even when no report column covers them."""
    config = ReportConfiguration(
        domain='somedomain',
        config_id='someconfig',
        aggregation_columns=['doc_id'],
        columns=[
            {
                "type": "field",
                "field": "somefield",
                "format": "default",
                "aggregation": "sum",
            },
        ],
        filters=[],
        configured_charts=[],
    )
    source = ConfigurableReportDataSource.from_spec(config)
    with mock_datasource_config():
        self.assertEqual(['doc_id'], source.group_by)
def ucr_report(self):
    """Build the adherence UCR data source, filtered to the configured locations."""
    spec = StaticReportConfiguration.by_id('static-%s-adherence' % self.domain)
    report = ConfigurableReportDataSource.from_spec(spec, include_prefilters=True)
    filter_values = get_filter_values(spec.ui_filters, self.request_params, self.request.couch_user)
    # wrap each non-empty configured location id as a filter Choice
    location_choices = [
        Choice(value=location_id, display='')
        for location_id in self.report_config.locations_id
        if location_id
    ]
    if location_choices:
        filter_values['village'] = location_choices
    report.set_filter_values(filter_values)
    return report
def _build_report(self, vals, field='my_field', build_data_source=True): """ Build a new report, and populate it with cases. Return a ConfigurableReportDataSource and a FieldColumn :param vals: List of values to populate the given report field with. :param field: The name of a field in the data source/report :return: Tuple containing a ConfigurableReportDataSource and FieldColumn. The column is a column mapped to the given field. """ # Create Cases for v in vals: update_props = {field: v} if v is not None else {} self._new_case(update_props).save() if build_data_source: tasks.rebuild_indicators(self.data_source_config._id) adapter = get_indicator_adapter(self.data_source_config) adapter.refresh_table() report_config = ReportConfiguration( domain=self.domain, config_id=self.data_source_config._id, title='foo', aggregation_columns=['doc_id'], columns=[{ "type": "expanded", "field": field, "display": field, "format": "default", }], filters=[], configured_charts=[] ) report_config.save() self.addCleanup(report_config.delete) data_source = ConfigurableReportDataSource.from_spec(report_config) return data_source, data_source.top_level_columns[0]
def _build_report(self, vals, field='my_field', build_data_source=True): """ Build a new report, and populate it with cases. Return a ConfigurableReportDataSource and a FieldColumn :param vals: List of values to populate the given report field with. :param field: The name of a field in the data source/report :return: Tuple containing a ConfigurableReportDataSource and FieldColumn. The column is a column mapped to the given field. """ # Create Cases for v in vals: update_props = {field: v} if v is not None else {} self._new_case(update_props).save() if build_data_source: tasks.rebuild_indicators(self.data_source_config._id) report_config = ReportConfiguration( domain=self.domain, config_id=self.data_source_config._id, title='foo', aggregation_columns=['doc_id'], columns=[{ "type": "expanded", "field": field, "display": field, "format": "default", }], filters=[], configured_charts=[] ) report_config.save() self.addCleanup(report_config.delete) data_source = ConfigurableReportDataSource.from_spec(report_config) return data_source, data_source.top_level_columns[0]
def _get_report_and_data_source(report_id, domain):
    """Look up a report config by id and build its prefilter-inclusive data source."""
    report = get_report_config(report_id, domain)[0]
    data_source = ConfigurableReportDataSource.from_spec(report, include_prefilters=True)
    return report, data_source
def charts(self):
    """Build the five dashboard charts.

    Returns, in order:
      1. PieChart — cases by gender
      2. MultiBarChart — cases by type (new vs retreatment)
      3. MultiBarChart — sputum conversion by patient type (from the
         sputum-conversion UCR)
      4. PieChart — patients by category
      5. MultiBarChart — treatment outcome by type (new vs retreatment)

    NOTE(review): assumes each *_sql_data property returns a non-empty list
    whose first element is a dict of {'sort_key': value} entries, and that the
    sputum-conversion UCR returns at least one row — confirm upstream.
    """
    case_finding_sql_data = self.case_finding_sql_data[0]
    sputum_conversion_report = ConfigurableReportDataSource.from_spec(
        StaticReportConfiguration.by_id('static-%s-sputum_conversion' % self.domain),
        include_prefilters=True)
    filter_values = {'date': self.datespan}
    # wrap each non-empty configured location id as a filter Choice
    locations_id = [
        Choice(value=location_id, display='')
        for location_id in self.report_config.locations_id if location_id
    ]
    if locations_id:
        filter_values['village'] = locations_id
    if self.report_config.is_migrated is not None:
        filter_values['is_migrated'] = Choice(
            value=self.report_config.is_migrated, display='')
    sputum_conversion_report.set_filter_values(filter_values)
    sputum_conversion_data = sputum_conversion_report.get_data()[0]
    charts_sql_data = self.charts_sql_data[0]
    treatment_outcome_sql_data = self.treatment_outcome_sql_data[0]
    # fallback for missing keys so .get(...)['sort_key'] yields 0
    default_value = {'sort_key': 0}

    # Chart 1: cases by gender
    chart = PieChart(title=_('Cases by Gender'), key='gender', values=[])
    chart.data = [{
        'label': _('Male'),
        'value': case_finding_sql_data.get('male_total', default_value)['sort_key']
    }, {
        'label': _('Female'),
        'value': case_finding_sql_data.get('female_total', default_value)['sort_key']
    }, {
        'label': _('Transgender'),
        'value': case_finding_sql_data.get('transgender_total', default_value)['sort_key']
    }]

    # Chart 2: cases by type, split into New and Retreatment datasets
    chart2 = MultiBarChart(_('Cases By Type'), x_axis=Axis(''), y_axis=Axis(''))
    chart2.stacked = False
    chart2.showLegend = False
    positive_smear = case_finding_sql_data.get('new_positive_tb_pulmonary', default_value)['sort_key']
    negative_smear = case_finding_sql_data.get('new_negative_tb_pulmonary', default_value)['sort_key']
    positive_extra_pulmonary = case_finding_sql_data.get(
        'new_positive_tb_extrapulmonary', default_value)['sort_key']
    relapse_cases = case_finding_sql_data.get('recurrent_positive_tb', default_value)['sort_key']
    failure_cases = case_finding_sql_data.get('failure_positive_tb', default_value)['sort_key']
    lfu_cases = case_finding_sql_data.get('lfu_positive_tb', default_value)['sort_key']
    others_cases = case_finding_sql_data.get('others_positive_tb', default_value)['sort_key']
    chart2.add_dataset(_('New'), [{
        'x': 'Smear +ve',
        'y': positive_smear
    }, {
        'x': 'Smear -ve',
        'y': negative_smear
    }, {
        'x': 'EP',
        'y': positive_extra_pulmonary
    }])
    chart2.add_dataset(_('Retreatment'), [{
        'x': 'Relapse',
        'y': relapse_cases
    }, {
        'x': 'Failure',
        'y': failure_cases
    }, {
        'x': 'Treatment After Default',
        'y': lfu_cases
    }, {
        'x': 'Others',
        'y': others_cases
    }])

    # Chart 3: sputum conversion, one stacked dataset per result category
    chart3 = MultiBarChart('Sputum Conversion By Patient Type', Axis(''), Axis(''))
    chart3.stacked = True
    chart3.add_dataset('Positive', [
        {
            'x': _('New Sputum +ve (2 month IP)'),
            'y': sputum_conversion_data.get(
                'new_sputum_positive_patient_2months_ip', 0)
        },
        {
            'x': _('New Sputum +ve (3 month IP)'),
            'y': sputum_conversion_data.get(
                'new_sputum_positive_patient_3months_ip', 0)
        },
        {
            'x': _('Cat II (3 month IP)'),
            'y': sputum_conversion_data.get('positive_endofip_patients_cat2', 0)
        },
    ])
    chart3.add_dataset(_('Negative'), [
        {
            'x': _('New Sputum +ve (2 month IP)'),
            'y': sputum_conversion_data.get(
                'new_sputum_negative_patient_2months_ip', 0)
        },
        {
            'x': _('New Sputum +ve (3 month IP)'),
            'y': sputum_conversion_data.get(
                'new_sputum_negative_patient_3months_ip', 0)
        },
        {
            'x': _('Cat II (3 month IP)'),
            'y': sputum_conversion_data.get('negative_endofip_patients_cat2', 0)
        },
    ])
    chart3.add_dataset('NA', [
        {
            'x': _('New Sputum +ve (2 month IP)'),
            'y': sputum_conversion_data.get('new_sputum_na_patient_2months_ip', 0)
        },
        {
            'x': _('New Sputum +ve (3 month IP)'),
            'y': sputum_conversion_data.get('new_sputum_na_patient_3months_ip', 0)
        },
        {
            'x': _('Cat II (3 month IP)'),
            'y': sputum_conversion_data.get('na_endofip_patients_cat2', 0)
        },
    ])

    # Chart 4: patients by category
    chart4 = PieChart(title=_('Total number of patients by category'), key='', values=[])
    chart4.data = [{
        'label': _('Cat1'),
        'value': charts_sql_data.get('cat1_patients', default_value)['sort_key']
    }, {
        'label': _('Cat2'),
        'value': charts_sql_data.get('cat2_patients', default_value)['sort_key']
    }]

    # Chart 5: treatment outcomes, one stacked dataset per outcome,
    # each split into New vs Retreatment
    chart5 = MultiBarChart('Outcome By Type', Axis(''), Axis(''))
    chart5.stacked = True
    chart5.add_dataset(_('Cured'), [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_cured', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get('recurrent_patients_cured', default_value)['sort_key']
    }])
    chart5.add_dataset('Treatment Complete', [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_treatment_complete', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get(
            'recurrent_patients_treatment_complete', default_value)['sort_key']
    }])
    chart5.add_dataset('Died', [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_died', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get('recurrent_patients_died', default_value)['sort_key']
    }])
    chart5.add_dataset(_('Failure'), [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_treatment_failure', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get(
            'recurrent_patients_treatment_failure', default_value)['sort_key']
    }])
    chart5.add_dataset(_('Loss to Follow-up'), [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_loss_to_follow_up', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get(
            'recurrent_patients_loss_to_follow_up', default_value)['sort_key']
    }])
    chart5.add_dataset(_('Regimen Changed'), [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_regimen_changed', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get(
            'recurrent_patients_regimen_changed', default_value)['sort_key']
    }])
    chart5.add_dataset('Not Evaluated', [{
        'x': _('New'),
        'y': treatment_outcome_sql_data.get('new_patients_not_evaluated', default_value)['sort_key']
    }, {
        'x': _('Retreatment'),
        'y': treatment_outcome_sql_data.get('recurrent_patients_not_evaluated',
                                            default_value)['sort_key']
    }])

    return [chart, chart2, chart3, chart4, chart5]
def __init__(self, domain, filters, report_id):
    """Load the domain-specific static report config and apply the given filters."""
    spec = self._get_static_report_configuration_without_owner_transform(
        report_id.format(domain=domain), domain)
    report_config = ConfigurableReportDataSource.from_spec(spec)
    report_config.set_filter_values(filters)
    self.report_config = report_config
def data_source(self):
    """Report data source built from the spec, with prefilters and the view language."""
    source = ConfigurableReportDataSource.from_spec(
        self.spec, include_prefilters=True)
    source.lang = self.lang
    return source
def __init__(self, domain, filters, report_id):
    """Build the filtered report data source for this domain's static report."""
    static_spec = self._get_static_report_configuration_without_owner_transform(
        report_id.format(domain=domain), domain)
    source = ConfigurableReportDataSource.from_spec(static_spec)
    source.set_filter_values(filters)
    self.report_config = source
def _get_report_and_data_source(report_id, domain):
    """Resolve the report config and construct its data source (prefilters included)."""
    report = get_report_config(report_id, domain)[0]
    source = ConfigurableReportDataSource.from_spec(
        report, include_prefilters=True)
    return report, source
def test_total_row(self):
    """get_total_row sums the numeric column and blanks the remaining cells."""
    inserted = self._add_some_rows(3)
    source = ConfigurableReportDataSource.from_spec(self.report_config)
    expected_sum = sum(r.number for r in inserted)
    self.assertEqual(source.get_total_row(), ['Total', expected_sum, '', '', ''])
def data_source(self):
    """Data source for this report spec, prefilters included, in the view's language."""
    source = ConfigurableReportDataSource.from_spec(self.spec, include_prefilters=True)
    source.lang = self.lang
    return source
def __init__(self, domain, filters, report_id, override_agg_column=None):
    """Build the filtered static-report data source, optionally overriding the aggregation column."""
    spec = self._get_static_report_configuration_without_owner_transform(
        report_id, domain, override_agg_column)
    source = ConfigurableReportDataSource.from_spec(spec)
    source.set_filter_values(filters)
    self.report_config = source
def setUp(self):
    """Build a report data source from the first report configured for DOMAIN."""
    super(ReportTranslationTest, self).setUp()
    first_report = ReportConfiguration.by_domain(self.DOMAIN)[0]
    self.report_source = ConfigurableReportDataSource.from_spec(first_report)