コード例 #1
0
ファイル: test_pillow.py プロジェクト: saketkanth/commcare-hq
class IndicatorPillowTestBase(TestCase):
    """Shared fixture for indicator-pillow tests: builds the sample data
    source and its SQL adapter, and provides a helper to verify the row
    written for the sample document.
    """

    def setUp(self):
        # Cooperate with the TestCase chain (missing before; the sibling
        # variant of this class calls super in both setUp and tearDown).
        super(IndicatorPillowTestBase, self).setUp()
        self.config = get_sample_data_source()
        self.config.save()
        self.adapter = IndicatorSqlAdapter(self.config)
        # Frozen "utcnow" used wherever specs.datetime is patched below.
        self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)

    def tearDown(self):
        self.config.delete()
        self.adapter.drop_table()
        super(IndicatorPillowTestBase, self).tearDown()

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, expected_indicators, datetime_mock):
        """Assert the adapter's table holds exactly one row whose columns
        match ``expected_indicators`` (decimals compared approximately)."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                # Decimals may lose precision round-tripping through SQL.
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(
                    expected_indicators[k], v,
                    'mismatched property: {} (expected {}, was {})'.format(
                        k, expected_indicators[k], v
                    )
                )
コード例 #2
0
ファイル: test_pillow.py プロジェクト: yonglehou/commcare-hq
class IndicatorPillowTestBase(TestCase):
    """Shared fixture for indicator-pillow tests: builds the sample data
    source and its SQL adapter, and provides a helper to verify the row
    written for the sample document."""

    def setUp(self):
        super(IndicatorPillowTestBase, self).setUp()
        self.config = get_sample_data_source()
        self.config.save()
        self.adapter = IndicatorSqlAdapter(self.config)
        # Frozen "utcnow" used wherever specs.datetime is patched below.
        self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)

    def tearDown(self):
        self.config.delete()
        self.adapter.drop_table()
        super(IndicatorPillowTestBase, self).tearDown()

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, expected_indicators, datetime_mock):
        """Assert the adapter's table holds exactly one row whose columns
        match ``expected_indicators`` (decimals compared approximately)."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                # Decimals may lose precision round-tripping through SQL.
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(
                    expected_indicators[k], v,
                    'mismatched property: {} (expected {}, was {})'.format(
                        k, expected_indicators[k], v))
コード例 #3
0
ファイル: test_columns.py プロジェクト: ansarbek/commcare-hq
 def test_column_uniqueness_when_truncated(self):
     """A choice_list indicator with a long column_id forces truncated
     per-choice column names; verify the table can still be built,
     written to, and queried (i.e. the names stayed unique)."""
     problem_spec = {
         "display_name": "practicing_lessons",
         "property_name": "long_column",
         "choices": [
             "duplicate_choice_1",
             "duplicate_choice_2",
         ],
         "select_style": "multiple",
         # long enough that per-choice column names must be truncated
         "column_id": "a_very_long_base_selection_column_name_with_limited_room",
         "type": "choice_list",
     }
     data_source_config = DataSourceConfiguration(
         domain='test',
         display_name='foo',
         referenced_doc_type='CommCareCase',
         table_id=uuid.uuid4().hex,
         configured_filter={},
         configured_indicators=[problem_spec],
     )
     adapter = IndicatorSqlAdapter(data_source_config)
     adapter.rebuild_table()
     # ensure we can save data to the table.
     adapter.save({
         '_id': uuid.uuid4().hex,
         'domain': 'test',
         'doc_type': 'CommCareCase',
         'long_column': 'duplicate_choice_1',
     })
     # and query it back
     q = adapter.get_query_object()
     self.assertEqual(1, q.count())
コード例 #4
0
ファイル: views.py プロジェクト: ekush/commcare-hq
def export_data_source(request, domain, config_id):
    """Export a UCR data source's rows as a tabular file.

    Applies URL-derived keyword and SQL filters, then streams the table
    (header row followed by data rows) through a temp file into an export
    response. Returns HTTP 400 on invalid params or query errors.
    """
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
    except UserQueryError as e:
        # str(e), not e.message: BaseException.message does not exist on Python 3.
        return HttpResponse(str(e), status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # build the export lazily: header row first, then the filtered rows
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            # NOTE(review): the temp file at `path` is not removed on this
            # error path -- confirm cleanup is handled elsewhere.
            msg = _("There was a problem executing your query, please make "
                    "sure your parameters are valid.")
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
コード例 #5
0
    def test_table_population(self):
        """Saving a doc with a repeat group writes one table row per
        repeat item, all of which can be read back intact."""

        adapter = IndicatorSqlAdapter(self.config)
        # Delete and create table
        adapter.rebuild_table()

        # Create a doc with three repeat items
        now = datetime.datetime.now()
        one_hour = datetime.timedelta(hours=1)
        logs = [
            {"start_time": now, "end_time": now + one_hour, "person": "al"},
            {"start_time": now + one_hour, "end_time": now + (one_hour * 2), "person": "chris"},
            {"start_time": now + (one_hour * 2), "end_time": now + (one_hour * 3), "person": "katie"},
        ]
        doc = _test_doc(form={"time_logs": logs})

        # Save this document into the table
        adapter.save(doc)

        # Get rows from the table
        rows = adapter.get_query_object()
        retrieved_logs = [{"start_time": r.start_time, "end_time": r.end_time, "person": r.person} for r in rows]
        # Check those rows against the expected result (order-insensitive)
        self.assertItemsEqual(
            retrieved_logs, logs, "The repeat data saved in the data source table did not match the expected data!"
        )
コード例 #6
0
ファイル: views.py プロジェクト: ekush/commcare-hq
def export_data_source(request, domain, config_id):
    """Export a UCR data source's rows as a tabular file.

    Applies URL-derived keyword and SQL filters, then streams the table
    (header row followed by data rows) through a temp file into an export
    response. Returns HTTP 400 on invalid params or query errors.
    """
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
    except UserQueryError as e:
        # str(e), not e.message: BaseException.message does not exist on Python 3.
        return HttpResponse(str(e), status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # build the export lazily: header row first, then the filtered rows
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            # NOTE(review): the temp file at `path` is not removed on this
            # error path -- confirm cleanup is handled elsewhere.
            msg = _("There was a problem executing your query, please make "
                    "sure your parameters are valid.")
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
コード例 #7
0
 def test_column_uniqueness_when_truncated(self):
     """A choice_list indicator with a long column_id forces truncated
     per-choice column names; verify the table can still be built,
     written to, and queried (i.e. the names stayed unique)."""
     problem_spec = {
         "display_name": "practicing_lessons",
         "property_name": "long_column",
         "choices": [
             "duplicate_choice_1",
             "duplicate_choice_2",
         ],
         "select_style": "multiple",
         # long enough that per-choice column names must be truncated
         "column_id":
         "a_very_long_base_selection_column_name_with_limited_room",
         "type": "choice_list",
     }
     data_source_config = DataSourceConfiguration(
         domain='test',
         display_name='foo',
         referenced_doc_type='CommCareCase',
         table_id=uuid.uuid4().hex,
         configured_filter={},
         configured_indicators=[problem_spec],
     )
     adapter = IndicatorSqlAdapter(data_source_config)
     adapter.rebuild_table()
     # ensure we can save data to the table.
     adapter.save({
         '_id': uuid.uuid4().hex,
         'domain': 'test',
         'doc_type': 'CommCareCase',
         'long_column': 'duplicate_choice_1',
     })
     # and query it back
     q = adapter.get_query_object()
     self.assertEqual(1, q.count())
コード例 #8
0
ファイル: views.py プロジェクト: ansarbek/commcare-hq
def export_data_source(request, domain, config_id):
    """Export a UCR data source's rows in the requested tabular format.

    Validates the requested format, applies URL-derived filters, redirects
    XLS exports that exceed the 65535-row limit to the default format, and
    streams the table through a temp file into an export response.
    Returns HTTP 400 on invalid params or query errors.
    """
    config, _ = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy('format must be one of the following: {}').format(', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        # str(e), not e.message: BaseException.message does not exist on Python 3.
        return HttpResponse(str(e), status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # xls format has limit of 65536 rows
    # First row is taken up by headers
    if params.format == Format.XLS and q.count() >= 65535:
        keyword_params = dict(**request.GET)
        # use default format
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect(
            '%s?%s' % (
                reverse('export_configurable_data_source', args=[domain, config._id]),
                urlencode(keyword_params)
            )
        )

    # build the export lazily: header row first, then the filtered rows
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            # NOTE(review): the temp file at `path` is not removed on this
            # error path -- confirm cleanup is handled elsewhere.
            msg = ugettext_lazy(
                "There was a problem executing your query, "
                "please make sure your parameters are valid."
            )
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
コード例 #9
0
ファイル: views.py プロジェクト: saketkanth/commcare-hq
 def page_context(self):
     """Template context: the data source plus a 20-row preview of its table."""
     data_source, is_static = get_datasource_config_or_404(self.config_id, self.domain)
     query = IndicatorSqlAdapter(data_source).get_query_object()
     context = {'data_source': data_source}
     context['columns'] = query.column_descriptions
     context['data'] = query[:20]
     context['total_rows'] = query.count()
     return context
コード例 #10
0
 def page_context(self):
     """Extend the parent context with a preview of the aggregate UCR table."""
     context = super(PreviewAggregateUCRView, self).page_context
     query = IndicatorSqlAdapter(self.table_definition).get_query_object()
     context.update(
         columns=query.column_descriptions,
         data=[list(record) for record in query[:20]],
         total_rows=query.count(),
     )
     return context
コード例 #11
0
 def page_context(self):
     """Template context: the data source plus a 20-row preview of its table."""
     data_source, is_static = get_datasource_config_or_404(
         self.config_id, self.domain)
     query = IndicatorSqlAdapter(data_source).get_query_object()
     context = {'data_source': data_source}
     context['columns'] = query.column_descriptions
     context['data'] = query[:20]
     context['total_rows'] = query.count()
     return context
コード例 #12
0
    def _check_weekly_results(self):
        """Verify the weekly aggregate table week-by-week for self.case_id:
        row existence, open/pregnant flags, and follow-up form counts."""
        aggregate_table_adapter = IndicatorSqlAdapter(
            self.weekly_aggregate_table_definition)
        aggregate_table = aggregate_table_adapter.get_table()
        aggregate_query = aggregate_table_adapter.get_query_object()

        doc_id_column = aggregate_table.c['doc_id']
        week_column = aggregate_table.c['week']
        # before the case was opened in December there should be no rows
        self.assertEqual(
            0,
            aggregate_query.filter(doc_id_column == self.case_id,
                                   week_column <= '2017-12-17').count())

        # from the Monday in December when the case was opened, the case
        # should exist but should not be flagged as pregnant
        for monday in ('2017-12-18', '2017-12-25', '2018-01-01'):
            row = aggregate_query.filter(doc_id_column == self.case_id,
                                         week_column == monday).one()
            self.assertEqual(self.case_name, row.name)
            self.assertEqual(1, row.open_in_month)
            self.assertEqual(0, row.pregnant_in_month)
            self.assertEqual(None, row.fu_forms_in_month)

        # from the Monday of the EDD the case should exist and be flagged as pregnant
        for monday in ('2018-01-15', '2018-01-22', '2018-01-29'):
            row = aggregate_query.filter(
                doc_id_column == self.case_id,
                week_column == monday,
            ).one()
            self.assertEqual(1, row.open_in_month)
            self.assertEqual(1, row.pregnant_in_month)
            self.assertEqual(None, row.fu_forms_in_month)

        # the Monday of the March visit: still open and pregnant, and one form
        row = aggregate_query.filter(doc_id_column == self.case_id,
                                     week_column == '2018-03-12').one()
        self.assertEqual(1, row.open_in_month)
        self.assertEqual(1, row.pregnant_in_month)
        self.assertEqual(1, row.fu_forms_in_month)

        # but the Monday after there are no forms again
        row = aggregate_query.filter(doc_id_column == self.case_id,
                                     week_column == '2018-03-19').one()
        self.assertEqual(1, row.open_in_month)
        self.assertEqual(1, row.pregnant_in_month)
        self.assertEqual(None, row.fu_forms_in_month)

        # the week of April 9: still open and pregnant, and there are 2 forms
        row = aggregate_query.filter(doc_id_column == self.case_id,
                                     week_column == '2018-04-09').one()
        self.assertEqual(1, row.open_in_month)
        self.assertEqual(1, row.pregnant_in_month)
        self.assertEqual(2, row.fu_forms_in_month)
コード例 #13
0
def export_data_source(request, domain, config_id):
    """Export a UCR data source's rows in the requested tabular format.

    Validates the requested format, applies URL-derived filters, redirects
    XLS exports that exceed the 65535-row limit to the default format, and
    streams the table through a temp file into an export response.
    Returns HTTP 400 on invalid params or query errors.
    """
    config, _ = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy(
                'format must be one of the following: {}').format(
                    ', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        # str(e), not e.message: BaseException.message does not exist on Python 3.
        return HttpResponse(str(e), status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # xls format has limit of 65536 rows
    # First row is taken up by headers
    if params.format == Format.XLS and q.count() >= 65535:
        keyword_params = dict(**request.GET)
        # use default format
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect(
            '%s?%s' %
            (reverse('export_configurable_data_source',
                     args=[domain, config._id]), urlencode(keyword_params)))

    # build the export lazily: header row first, then the filtered rows
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            # NOTE(review): the temp file at `path` is not removed on this
            # error path -- confirm cleanup is handled elsewhere.
            msg = ugettext_lazy("There was a problem executing your query, "
                                "please make sure your parameters are valid.")
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
コード例 #14
0
ファイル: views.py プロジェクト: ekush/commcare-hq
def preview_data_source(request, domain, config_id):
    """Render the first 20 rows of a data source for preview."""
    config, is_static = get_datasource_config_or_404(config_id, domain)
    query = IndicatorSqlAdapter(config).get_query_object()
    context = _shared_context(domain)
    context['data_source'] = config
    context['columns'] = query.column_descriptions
    context['data'] = query[:20]
    context['total_rows'] = query.count()
    return render(request, "userreports/preview_data.html", context)
コード例 #15
0
ファイル: views.py プロジェクト: ansarbek/commcare-hq
def preview_data_source(request, domain, config_id):
    """Show a preview (first 20 rows) of the configured data source."""
    config, is_static = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    query = adapter.get_query_object()
    preview = {
        'data_source': config,
        'columns': query.column_descriptions,
        'data': query[:20],
        'total_rows': query.count(),
    }
    context = _shared_context(domain)
    context.update(preview)
    return render(request, "userreports/preview_data.html", context)
コード例 #16
0
    def _check_basic_results(self):
        """Verify the basic aggregate table holds exactly one row for
        self.case_id with the expected aggregated values."""
        aggregate_table_adapter = IndicatorSqlAdapter(
            self.basic_aggregate_table_definition)
        aggregate_table = aggregate_table_adapter.get_table()
        aggregate_query = aggregate_table_adapter.get_query_object()

        doc_id_column = aggregate_table.c['doc_id']

        # exactly one aggregate row should exist for the case
        # (the earlier comment here, copied from the weekly check, was wrong)
        self.assertEqual(
            1,
            aggregate_query.filter(doc_id_column == self.case_id, ).count())

        row = aggregate_query.filter(doc_id_column == self.case_id, ).one()
        self.assertEqual(self.case_name, row.name)
        self.assertEqual('2018-01-21', row.pregnancy_start_date)
        self.assertEqual(3, row.fu_forms)
コード例 #17
0
    def _check_basic_results(self):
        """Verify the basic aggregate table holds exactly one row for
        self.case_id with the expected aggregated values."""
        aggregate_table_adapter = IndicatorSqlAdapter(self.basic_aggregate_table_definition)
        aggregate_table = aggregate_table_adapter.get_table()
        aggregate_query = aggregate_table_adapter.get_query_object()

        doc_id_column = aggregate_table.c['doc_id']

        # exactly one aggregate row should exist for the case
        # (the earlier comment here, copied from the weekly check, was wrong)
        self.assertEqual(1, aggregate_query.filter(
            doc_id_column == self.case_id,
        ).count())

        row = aggregate_query.filter(
            doc_id_column == self.case_id,
        ).one()
        self.assertEqual(self.case_name, row.name)
        self.assertEqual('2018-01-21', row.pregnancy_start_date)
        self.assertEqual(3, row.fu_forms)
コード例 #18
0
    def test_table_population(self):
        """Saving a doc with a repeat group writes one table row per
        repeat item, all of which can be read back intact."""

        adapter = IndicatorSqlAdapter(self.config)
        # Delete and create table
        adapter.rebuild_table()

        # Create a doc with three repeat items
        now = datetime.datetime.now()
        one_hour = datetime.timedelta(hours=1)
        logs = [
            {
                "start_time": now,
                "end_time": now + one_hour,
                "person": "al"
            },
            {
                "start_time": now + one_hour,
                "end_time": now + (one_hour * 2),
                "person": "chris"
            },
            {
                "start_time": now + (one_hour * 2),
                "end_time": now + (one_hour * 3),
                "person": "katie"
            },
        ]
        doc = _test_doc(form={'time_logs': logs})

        # Save this document into the table
        adapter.save(doc)

        # Get rows from the table
        rows = adapter.get_query_object()
        retrieved_logs = [{
            'start_time': r.start_time,
            'end_time': r.end_time,
            'person': r.person,
        } for r in rows]
        # Check those rows against the expected result (order-insensitive)
        self.assertItemsEqual(
            retrieved_logs, logs,
            "The repeat data saved in the data source table did not match the expected data!"
        )
コード例 #19
0
ファイル: base_test.py プロジェクト: zbidi/commcare-hq
 def _rebuild_table_get_query_object(self):
     """Rebuild the datasource's indicator table, then return a fresh query over it."""
     rebuild_indicators(self.datasource._id)
     return IndicatorSqlAdapter(self.datasource).get_query_object()
コード例 #20
0
class IndicatorPillowTest(TestCase):
    """Tests for ConfigurableIndicatorPillow document filtering and
    indicator-table writes against the sample data source."""

    def setUp(self):
        # NOTE(review): unlike sibling variants of this test class, the
        # config is not saved here and tearDown does not delete it --
        # tests needing a persisted config call self.config.save() inline.
        self.config = get_sample_data_source()
        self.pillow = ConfigurableIndicatorPillow()
        self.pillow.bootstrap(configs=[self.config])
        self.adapter = IndicatorSqlAdapter(self.config)
        # Frozen "utcnow" used wherever specs.datetime is patched below.
        self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)

    def tearDown(self):
        self.adapter.drop_table()

    def test_filter(self):
        # note: this is a silly test now that python_filter always returns true
        not_matching = [
            dict(doc_type="NotCommCareCase",
                 domain='user-reports',
                 type='ticket'),
            dict(doc_type="CommCareCase",
                 domain='not-user-reports',
                 type='ticket'),
            dict(doc_type="CommCareCase",
                 domain='user-reports',
                 type='not-ticket'),
        ]
        for document in not_matching:
            self.assertTrue(self.pillow.python_filter(document))

        self.assertTrue(
            self.pillow.python_filter(
                dict(doc_type="CommCareCase",
                     domain='user-reports',
                     type='ticket')))

    @patch('corehq.apps.userreports.specs.datetime')
    def test_change_transport(self, datetime_mock):
        """Feeding the sample doc through the pillow writes the expected row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.change_transport(sample_doc)
        self._check_sample_doc_state()

    @patch('corehq.apps.userreports.specs.datetime')
    def test_rebuild_indicators(self, datetime_mock):
        """rebuild_indicators repopulates the table from saved docs."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.config.save()
        sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
        CommCareCase.get_db().save_doc(sample_doc)
        rebuild_indicators(self.config._id)
        self._check_sample_doc_state()

    def test_bad_integer_datatype(self):
        """Unparseable integer values must not prevent the row being saved."""
        self.config.save()
        bad_ints = ['a', '', None]
        for bad_value in bad_ints:
            self.pillow.change_transport({
                '_id': uuid.uuid4().hex,
                'doc_type': 'CommCareCase',
                'domain': 'user-reports',
                'type': 'ticket',
                'priority': bad_value
            })
        # make sure we saved rows to the table for everything
        self.assertEqual(len(bad_ints),
                         self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, datetime_mock):
        """Assert the table holds exactly one row matching the sample doc's
        expected indicators (decimals compared approximately)."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        _, expected_indicators = get_sample_doc_and_indicators(
            self.fake_time_now)
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                # Decimals may lose precision round-tripping through SQL.
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(expected_indicators[k], v)
コード例 #21
0
ファイル: test_pillow.py プロジェクト: johan--/commcare-hq
class IndicatorPillowTest(TestCase):
    """Tests for ConfigurableIndicatorPillow document filtering, stale
    rebuild detection, and indicator-table writes."""

    def setUp(self):
        self.config = get_sample_data_source()
        self.config.save()
        self.pillow = ConfigurableIndicatorPillow()
        self.pillow.bootstrap(configs=[self.config])
        self.adapter = IndicatorSqlAdapter(self.config)
        # Frozen "utcnow" used wherever specs.datetime is patched below.
        self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)

    def tearDown(self):
        self.config.delete()
        self.adapter.drop_table()

    def test_filter(self):
        # note: this is a silly test now that python_filter always returns true
        not_matching = [
            dict(doc_type="NotCommCareCase", domain='user-reports', type='ticket'),
            dict(doc_type="CommCareCase", domain='not-user-reports', type='ticket'),
            dict(doc_type="CommCareCase", domain='user-reports', type='not-ticket'),
        ]
        for document in not_matching:
            self.assertTrue(self.pillow.python_filter(document))

        self.assertTrue(self.pillow.python_filter(
            dict(doc_type="CommCareCase", domain='user-reports', type='ticket')
        ))

    def test_stale_rebuild(self):
        """Rebuilding from a config whose _rev is outdated must raise."""
        later_config = copy(self.config)
        later_config.save()
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.rebuild_table(IndicatorSqlAdapter(self.config))

    @patch('corehq.apps.userreports.specs.datetime')
    def test_change_transport(self, datetime_mock):
        """Feeding the sample doc through the pillow writes the expected row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.change_transport(sample_doc)
        self._check_sample_doc_state()

    @patch('corehq.apps.userreports.specs.datetime')
    def test_rebuild_indicators(self, datetime_mock):
        """rebuild_indicators repopulates the table from saved docs."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.config.save()
        sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
        CommCareCase.get_db().save_doc(sample_doc)
        rebuild_indicators(self.config._id)
        self._check_sample_doc_state()

    def test_bad_integer_datatype(self):
        """Unparseable integer values must not prevent the row being saved."""
        self.config.save()
        bad_ints = ['a', '', None]
        for bad_value in bad_ints:
            self.pillow.change_transport({
                '_id': uuid.uuid4().hex,
                'doc_type': 'CommCareCase',
                'domain': 'user-reports',
                'type': 'ticket',
                'priority': bad_value
            })
        # make sure we saved rows to the table for everything
        self.assertEqual(len(bad_ints), self.adapter.get_query_object().count())

    @patch('corehq.apps.userreports.specs.datetime')
    def _check_sample_doc_state(self, datetime_mock):
        """Assert the table holds exactly one row matching the sample doc's
        expected indicators (decimals compared approximately)."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        _, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                # Decimals may lose precision round-tripping through SQL.
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(expected_indicators[k], v)
コード例 #22
0
 def _get_query_object(self):
     """Return a SQLAlchemy query over this test's datasource table."""
     return IndicatorSqlAdapter(self.datasource).get_query_object()
コード例 #23
0
class IndicatorPillowTest(TestCase):
    """Tests for ConfigurableIndicatorPillow stale rebuild detection and
    indicator-table writes against the sample data source."""

    def setUp(self):
        self.config = get_sample_data_source()
        self.config.save()
        self.pillow = ConfigurableIndicatorPillow()
        self.pillow.bootstrap(configs=[self.config])
        self.adapter = IndicatorSqlAdapter(self.config)
        # Frozen "utcnow" used wherever specs.datetime is patched below.
        self.fake_time_now = datetime(2015, 4, 24, 12, 30, 8, 24886)

    def tearDown(self):
        self.config.delete()
        self.adapter.drop_table()

    def test_stale_rebuild(self):
        """Rebuilding from a config whose _rev is outdated must raise."""
        later_config = copy(self.config)
        later_config.save()
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.rebuild_table(IndicatorSqlAdapter(self.config))

    @patch("corehq.apps.userreports.specs.datetime")
    def test_change_transport(self, datetime_mock):
        """Feeding the sample doc through the pillow writes the expected row."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
        self.pillow.change_transport(sample_doc)
        self._check_sample_doc_state()

    @patch("corehq.apps.userreports.specs.datetime")
    def test_rebuild_indicators(self, datetime_mock):
        """rebuild_indicators repopulates the table from saved docs."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        self.config.save()
        sample_doc, _ = get_sample_doc_and_indicators(self.fake_time_now)
        CommCareCase.get_db().save_doc(sample_doc)
        rebuild_indicators(self.config._id)
        self._check_sample_doc_state()

    def test_bad_integer_datatype(self):
        """Unparseable integer values must not prevent the row being saved."""
        self.config.save()
        bad_ints = ["a", "", None]
        for bad_value in bad_ints:
            self.pillow.change_transport(
                {
                    "_id": uuid.uuid4().hex,
                    "doc_type": "CommCareCase",
                    "domain": "user-reports",
                    "type": "ticket",
                    "priority": bad_value,
                }
            )
        # make sure we saved rows to the table for everything
        self.assertEqual(len(bad_ints), self.adapter.get_query_object().count())

    @patch("corehq.apps.userreports.specs.datetime")
    def _check_sample_doc_state(self, datetime_mock):
        """Assert the table holds exactly one row matching the sample doc's
        expected indicators (decimals compared approximately)."""
        datetime_mock.utcnow.return_value = self.fake_time_now
        _, expected_indicators = get_sample_doc_and_indicators(self.fake_time_now)
        self.assertEqual(1, self.adapter.get_query_object().count())
        row = self.adapter.get_query_object()[0]
        for k in row.keys():
            v = getattr(row, k)
            if isinstance(expected_indicators[k], decimal.Decimal):
                # Decimals may lose precision round-tripping through SQL.
                self.assertAlmostEqual(expected_indicators[k], v)
            else:
                self.assertEqual(expected_indicators[k], v)
コード例 #24
0
    def _check_weekly_results(self):
        """Verify the weekly aggregate table week-by-week for self.case_id:
        row existence, open/pregnant flags, and follow-up form counts."""
        aggregate_table_adapter = IndicatorSqlAdapter(self.weekly_aggregate_table_definition)
        aggregate_table = aggregate_table_adapter.get_table()
        aggregate_query = aggregate_table_adapter.get_query_object()

        doc_id_column = aggregate_table.c['doc_id']
        week_column = aggregate_table.c['week']
        # before the case was opened in December there should be no rows
        self.assertEqual(0, aggregate_query.filter(
            doc_id_column == self.case_id,
            week_column <= '2017-12-17'
        ).count())

        # from the Monday in December when the case was opened, the case
        # should exist but should not be flagged as pregnant
        for monday in ('2017-12-18', '2017-12-25', '2018-01-01'):
            row = aggregate_query.filter(
                doc_id_column == self.case_id,
                week_column == monday
            ).one()
            self.assertEqual(self.case_name, row.name)
            self.assertEqual(1, row.open_in_month)
            self.assertEqual(0, row.pregnant_in_month)
            self.assertEqual(None, row.fu_forms_in_month)

        # from the Monday of the EDD the case should exist and be flagged as pregnant
        for monday in ('2018-01-15', '2018-01-22', '2018-01-29'):
            row = aggregate_query.filter(
                doc_id_column == self.case_id,
                week_column == monday,
            ).one()
            self.assertEqual(1, row.open_in_month)
            self.assertEqual(1, row.pregnant_in_month)
            self.assertEqual(None, row.fu_forms_in_month)

        # the Monday of the March visit: still open and pregnant, and one form
        row = aggregate_query.filter(
            doc_id_column == self.case_id,
            week_column == '2018-03-12'
        ).one()
        self.assertEqual(1, row.open_in_month)
        self.assertEqual(1, row.pregnant_in_month)
        self.assertEqual(1, row.fu_forms_in_month)

        # but the Monday after there are no forms again
        row = aggregate_query.filter(
            doc_id_column == self.case_id,
            week_column == '2018-03-19'
        ).one()
        self.assertEqual(1, row.open_in_month)
        self.assertEqual(1, row.pregnant_in_month)
        self.assertEqual(None, row.fu_forms_in_month)

        # the week of April 9: still open and pregnant, and there are 2 forms
        row = aggregate_query.filter(
            doc_id_column == self.case_id,
            week_column == '2018-04-09'
        ).one()
        self.assertEqual(1, row.open_in_month)
        self.assertEqual(1, row.pregnant_in_month)
        self.assertEqual(2, row.fu_forms_in_month)