Code example #1
File: forms.py Project: ansarbek/commcare-hq
    def update_report(self):
        from corehq.apps.userreports.views import delete_data_source_shared

        matching_data_source = self.ds_builder.get_existing_match()
        if matching_data_source:
            reactivated = False
            if matching_data_source._id != self.existing_report.config_id:

                # If no one else is using the current data source, delete it.
                data_source = DataSourceConfiguration.get(self.existing_report.config_id)
                if data_source.get_report_count() <= 1:
                    data_source.deactivate()

                self.existing_report.config_id = matching_data_source._id
            elif matching_data_source.is_deactivated:
                existing_sources = DataSourceConfiguration.by_domain(self.domain)
                active_sources = filter(lambda config: not config.is_deactivated, existing_sources)
                if len(active_sources) >= 5:
                    raise forms.ValidationError(_(
                        "Editing this report would require a new data source. The limit is 5. "
                        "To continue, first delete all of the reports using a particular "
                        "data source (or the data source itself) and try again. "
                    ))
                matching_data_source.is_deactivated = False
                reactivated = True
            changed = False
            indicators = self.ds_builder.indicators(self._number_columns)
            if matching_data_source.configured_indicators != indicators:
                matching_data_source.configured_indicators = indicators
                changed = True
            if changed or reactivated:
                matching_data_source.save()
                tasks.rebuild_indicators.delay(matching_data_source._id)
        else:
            # We need to create a new data source
            existing_sources = DataSourceConfiguration.by_domain(self.domain)
            active_sources = filter(lambda config: not config.is_deactivated, existing_sources)

            # Delete the old one if no other reports use it
            old_data_source = DataSourceConfiguration.get(self.existing_report.config_id)
            if old_data_source.get_report_count() <= 1:
                old_data_source.deactivate()

            # Make sure the user can create more data sources
            elif len(active_sources) >= 5:
                raise forms.ValidationError(_(
                    "Editing this report would require a new data source. The limit is 5. "
                    "To continue, first delete all of the reports using a particular "
                    "data source (or the data source itself) and try again. "
                ))
            data_source_config_id = self._build_data_source()
            self.existing_report.config_id = data_source_config_id

        self.existing_report.aggregation_columns = self._report_aggregation_cols
        self.existing_report.columns = self._report_columns
        self.existing_report.filters = self._report_filters
        self.existing_report.configured_charts = self._report_charts
        self.existing_report.validate()
        self.existing_report.save()
        return self.existing_report
Code example #2
File: tasks.py Project: ekush/commcare-hq
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(
        CustomDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = CustomDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain,
                               config.referenced_doc_type,
                               database=couchdb)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(
                doc['_id'], e))

    if not is_static:
        config.meta.build.finished = True
        config.save()
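
Note: rebuild_indicators is a Celery task and is normally queued asynchronously rather than called inline; example #1 above does this with tasks.rebuild_indicators.delay(...). A minimal sketch of such a call site follows; the queue_rebuild wrapper and the exact import path are assumptions, not part of the listing.

# Sketch only: queue the rebuild on a worker, mirroring the .delay() call in example #1.
from corehq.apps.userreports import tasks

def queue_rebuild(data_source):
    # Celery executes rebuild_indicators(data_source._id) in a background worker.
    tasks.rebuild_indicators.delay(data_source._id)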
Code example #3
def _iteratively_build_table(config, last_id=None, resume_helper=None):
    resume_helper = resume_helper or DataSourceResumeHelper(config)
    indicator_config_id = config._id

    relevant_ids = []
    document_store = get_document_store(config.domain,
                                        config.referenced_doc_type)
    for relevant_id in document_store.iter_document_ids(last_id):
        relevant_ids.append(relevant_id)
        if len(relevant_ids) >= ID_CHUNK_SIZE:
            resume_helper.set_ids_to_resume_from(relevant_ids)
            _build_indicators(config, document_store, relevant_ids,
                              resume_helper)
            relevant_ids = []

    if relevant_ids:
        resume_helper.set_ids_to_resume_from(relevant_ids)
        _build_indicators(config, document_store, relevant_ids, resume_helper)

    if not id_is_static(indicator_config_id):
        resume_helper.clear_ids()
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
        adapter = get_indicator_adapter(config,
                                        raise_errors=True,
                                        can_handle_laboratory=True)
        adapter.after_table_build()
Code example #4
File: tasks.py Project: saketkanth/commcare-hq
def _iteratively_build_table(config, last_id=None, resume_helper=None):
    resume_helper = resume_helper or DataSourceResumeHelper(config)
    indicator_config_id = config._id

    relevant_ids = []
    document_store = get_document_store(config.domain, config.referenced_doc_type)
    for relevant_id in document_store.iter_document_ids(last_id):
        relevant_ids.append(relevant_id)
        if len(relevant_ids) >= ID_CHUNK_SIZE:
            resume_helper.set_ids_to_resume_from(relevant_ids)
            _build_indicators(config, document_store, relevant_ids, resume_helper)
            relevant_ids = []

    if relevant_ids:
        resume_helper.set_ids_to_resume_from(relevant_ids)
        _build_indicators(config, document_store, relevant_ids, resume_helper)

    if not id_is_static(indicator_config_id):
        resume_helper.clear_ids()
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Code example #5
File: tasks.py Project: philipkaare/commcare-hq
def _build_indicators(indicator_config_id, relevant_ids):
    config = _get_config_by_id(indicator_config_id)
    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
            redis_client.srem(redis_key, doc.get('_id'))
        except Exception as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static(indicator_config_id):
        redis_client.delete(redis_key)
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Code example #6
File: test_pillow.py Project: kkrampa/commcare-hq
    def test_stale_rebuild(self):
        # rebuild indicators in another test will save this
        later_config = DataSourceConfiguration.get(self.config._id)
        later_config.save()
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.processors[0].rebuild_table(get_indicator_adapter(self.config))
Code example #7
    def test_updating_report_data_source(self):
        """
        Test that changing the app or number column for a report results in an update to the data source next time
        the report is saved.
        """

        # Make report
        builder_form = ConfigureTableReportForm(
            "Test Report",
            self.app._id,
            "case",
            "some_case_type",
            existing_report=None,
            data={
                'group_by': ['closed'],
                'chart': 'bar',
                'user_filters': '[]',
                'default_filters': '[]',
                'columns': '[{"property": "closed", "display_text": "closed", "calculation": "Count per Choice"}]',
            })
        self.assertTrue(builder_form.is_valid())
        report = builder_form.create_report()

        self.assertEqual(report.config.configured_indicators[0]['datatype'],
                         "string")

        # Make an edit to the first report builder report
        builder_form = ConfigureTableReportForm(
            "Test Report",
            self.app._id,
            "case",
            "some_case_type",
            existing_report=report,
            data={
                'group_by': ['user_id'],
                'chart': 'bar',
                'user_filters': '[]',
                'default_filters': '[]',
                # Note that a "Sum" calculation on the closed case property isn't very sensical, but doing it so
                # that I can have a numeric calculation without having to create real case properties for this case
                #  type.
                'columns': '[{"property": "closed", "display_text": "closed", "calculation": "Sum"}]',
            })
        self.assertTrue(builder_form.is_valid())
        builder_form.update_report()

        # reload report data source, because report.config is memoized
        data_source = DataSourceConfiguration.get(report.config._id)
        # The closed property indicator should now be decimal type because the user indicated that it was numeric
        # by giving the column the "Sum" aggregation.
        self.assertEqual(data_source.configured_indicators[0]['datatype'],
                         "decimal")
Code example #8
File: test_pillow.py Project: xbryanc/commcare-hq
    def test_stale_rebuild(self):
        # rebuild indicators in another test will save this
        later_config = DataSourceConfiguration.get(self.config._id)
        later_config.save()
        self.assertNotEqual(self.config._rev, later_config._rev)
        with self.assertRaises(StaleRebuildError):
            self.pillow.rebuild_table(get_indicator_adapter(self.config))
Code example #9
File: tasks.py Project: sheelio/commcare-hq
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(CustomDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = CustomDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type,
                               database=couchdb)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static:
        config.meta.build.finished = True
        config.save()
Code example #10
    def test_invalid_data_serial_processor(self, process_changes_chunk):
        # re-fetch from DB to bust object caches
        self.config = DataSourceConfiguration.get(self.config.data_source_id)

        process_changes_chunk.side_effect = Exception
        self.config.validations = [
            Validation.wrap({
                "name": "impossible_condition",
                "error_message": "This condition is impossible to satisfy",
                "expression": {
                    "type": "boolean_expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": "doesnt_exist"
                    },
                    "operator": "in",
                    "property_value": ["nonsense"]
                }
            })
        ]
        self.config.save()

        cases = self._create_and_process_changes()
        num_rows = self.adapter.get_query_object().count()
        self.assertEqual(num_rows, 0)
        invalid_data = InvalidUCRData.objects.all().values_list('doc_id',
                                                                flat=True)
        self.assertEqual(set([case.case_id for case in cases]),
                         set(invalid_data))
Code example #11
    def tearDown(self):
        self.adapter.clear_table()
        delete_all_cases()
        delete_all_xforms()
        InvalidUCRData.objects.all().delete()
        self.config = DataSourceConfiguration.get(self.config.data_source_id)
        self.config.validations = []
        self.config.save()
Code example #12
    def tearDown(self):
        delete_all_cases()
        delete_all_xforms()
        AsyncIndicator.objects.all().delete()
        InvalidUCRData.objects.all().delete()
        self.config = DataSourceConfiguration.get(self.config.data_source_id)
        self.config.validations = []
        self.config.save()
Code example #13
    def test_link_creates_datasource_and_report(self):
        link_info = create_linked_ucr(self.domain_link, self.report.get_id)

        new_datasource = DataSourceConfiguration.get(link_info.datasource.get_id)
        self.assertEqual(new_datasource.domain, self.domain_link.linked_domain)

        new_report = ReportConfiguration.get(link_info.report.get_id)
        self.assertEqual(new_report.domain, self.domain_link.linked_domain)
        self.assertEqual(self.report.get_id, new_report.report_meta.master_id)
Code example #14
def ucr_config(request, domain, config_id):
    report_config = ReportConfiguration.get(config_id)
    datasource_id = report_config.config_id
    datasource_config = DataSourceConfiguration.get(datasource_id)

    return JsonResponse({
        "report": report_config.to_json(),
        "datasource": datasource_config.to_json(),
    })
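
Note: this view exposes the report and its backing data source as JSON, which is the kind of payload the remote-link path in examples #17 and #26 pulls via remote_get_ucr_config. A hypothetical client-side fetch is sketched below; the URL layout and the fetch_ucr_config helper are illustrative assumptions, and only the {"report": ..., "datasource": ...} response shape comes from the view above.

# Hypothetical sketch: consuming the JSON produced by ucr_config().
import requests

def fetch_ucr_config(base_url, domain, config_id):
    # The URL pattern is an assumption; the real routing is not shown in this listing.
    url = "{}/a/{}/ucr_config/{}/".format(base_url, domain, config_id)
    response = requests.get(url)
    response.raise_for_status()
    payload = response.json()
    return payload["report"], payload["datasource"]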
Code example #15
def _iteratively_build_table(config,
                             resume_helper=None,
                             in_place=False,
                             limit=-1):
    resume_helper = resume_helper or DataSourceResumeHelper(config)
    indicator_config_id = config._id
    case_type_or_xmlns_list = config.get_case_type_or_xmlns_filter()
    completed_ct_xmlns = resume_helper.get_completed_case_type_or_xmlns()
    if completed_ct_xmlns:
        case_type_or_xmlns_list = [
            case_type_or_xmlns
            for case_type_or_xmlns in case_type_or_xmlns_list
            if case_type_or_xmlns not in completed_ct_xmlns
        ]

    for case_type_or_xmlns in case_type_or_xmlns_list:
        relevant_ids = []
        document_store = get_document_store(
            config.domain,
            config.referenced_doc_type,
            case_type_or_xmlns=case_type_or_xmlns)

        for i, relevant_id in enumerate(document_store.iter_document_ids()):
            if i >= limit > -1:
                break
            relevant_ids.append(relevant_id)
            if len(relevant_ids) >= ID_CHUNK_SIZE:
                _build_indicators(config, document_store, relevant_ids)
                relevant_ids = []

        if relevant_ids:
            _build_indicators(config, document_store, relevant_ids)

        resume_helper.add_completed_case_type_or_xmlns(case_type_or_xmlns)

    resume_helper.clear_resume_info()
    if not id_is_static(indicator_config_id):
        if in_place:
            config.meta.build.finished_in_place = True
        else:
            config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if in_place:
                if config.meta.build.initiated_in_place == current_config.meta.build.initiated_in_place:
                    current_config.meta.build.finished_in_place = True
            else:
                if config.meta.build.initiated == current_config.meta.build.initiated:
                    current_config.meta.build.finished = True
            current_config.save()
        adapter = get_indicator_adapter(config,
                                        raise_errors=True,
                                        can_handle_laboratory=True)
        adapter.after_table_build()
Code example #16
File: forms.py Project: ekush/commcare-hq
    def update_report(self):
        from corehq.apps.userreports.views import delete_data_source_shared

        matching_data_source = self.ds_builder.get_existing_match()
        if matching_data_source:
            if matching_data_source['id'] != self.existing_report.config_id:

                # If no one else is using the current data source, delete it.
                data_source = DataSourceConfiguration.get(
                    self.existing_report.config_id)
                if data_source.get_report_count() <= 1:
                    delete_data_source_shared(self.domain, data_source._id)

                self.existing_report.config_id = matching_data_source['id']

        else:
            # We need to create a new data source
            existing_sources = DataSourceConfiguration.by_domain(self.domain)

            # Delete the old one if no other reports use it
            old_data_source = DataSourceConfiguration.get(
                self.existing_report.config_id)
            if old_data_source.get_report_count() <= 1:
                delete_data_source_shared(self.domain, old_data_source._id)

            # Make sure the user can create more data sources
            elif len(existing_sources) >= 5:
                raise forms.ValidationError(
                    _("Editing this report would require a new data source. The limit is 5. "
                      "To continue, first delete all of the reports using a particular "
                      "data source (or the data source itself) and try again. "
                      ))

            data_source_config_id = self._build_data_source()
            self.existing_report.config_id = data_source_config_id

        self.existing_report.aggregation_columns = self._report_aggregation_cols
        self.existing_report.columns = self._report_columns
        self.existing_report.filters = self._report_filters
        self.existing_report.configured_charts = self._report_charts
        self.existing_report.validate()
        self.existing_report.save()
        return self.existing_report
Code example #17
File: ucr.py Project: solleks/commcare-hq
def create_linked_ucr(domain_link, report_config_id):
    if domain_link.is_remote:
        remote_configs = remote_get_ucr_config(domain_link, report_config_id)
        datasource = remote_configs["datasource"]
        report_config = remote_configs["report"]
    else:
        report_config = ReportConfiguration.get(report_config_id)
        datasource = DataSourceConfiguration.get(report_config.config_id)
    new_datasource = _get_or_create_datasource_link(domain_link, datasource)
    new_report = _get_or_create_report_link(domain_link, report_config, new_datasource)
    return LinkedUCRInfo(datasource=new_datasource, report=new_report)
Code example #18
File: forms.py Project: tlwakwella/commcare-hq
    def update_report(self):
        from corehq.apps.userreports.views import delete_data_source_shared

        matching_data_source = self.ds_builder.get_existing_match()
        if matching_data_source:
            reactivated = False
            if matching_data_source._id != self.existing_report.config_id:

                # If no one else is using the current data source, delete it.
                data_source = DataSourceConfiguration.get(self.existing_report.config_id)
                if data_source.get_report_count() <= 1:
                    data_source.deactivate()

                self.existing_report.config_id = matching_data_source._id
            elif matching_data_source.is_deactivated:
                matching_data_source.is_deactivated = False
                reactivated = True
            changed = False
            indicators = self.ds_builder.indicators(self._number_columns)
            if matching_data_source.configured_indicators != indicators:
                matching_data_source.configured_indicators = indicators
                changed = True
            if changed or reactivated:
                matching_data_source.save()
                tasks.rebuild_indicators.delay(matching_data_source._id)
        else:
            # Delete the old one if no other reports use it
            old_data_source = DataSourceConfiguration.get(self.existing_report.config_id)
            if old_data_source.get_report_count() <= 1:
                old_data_source.deactivate()

            data_source_config_id = self._build_data_source()
            self.existing_report.config_id = data_source_config_id

        self.existing_report.aggregation_columns = self._report_aggregation_cols
        self.existing_report.columns = self._report_columns
        self.existing_report.filters = self._report_filters
        self.existing_report.configured_charts = self._report_charts
        self.existing_report.validate()
        self.existing_report.save()
        return self.existing_report
Code example #19
File: forms.py Project: johan--/commcare-hq
    def update_report(self):
        from corehq.apps.userreports.views import delete_data_source_shared

        matching_data_source = self.ds_builder.get_existing_match()
        if matching_data_source:
            if matching_data_source['id'] != self.existing_report.config_id:

                # If no one else is using the current data source, delete it.
                data_source = DataSourceConfiguration.get(self.existing_report.config_id)
                if data_source.get_report_count() <= 1:
                    delete_data_source_shared(self.domain, data_source._id)

                self.existing_report.config_id = matching_data_source['id']

        else:
            # We need to create a new data source
            existing_sources = DataSourceConfiguration.by_domain(self.domain)

            # Delete the old one if no other reports use it
            old_data_source = DataSourceConfiguration.get(self.existing_report.config_id)
            if old_data_source.get_report_count() <= 1:
                delete_data_source_shared(self.domain, old_data_source._id)

            # Make sure the user can create more data sources
            elif len(existing_sources) >= 5:
                raise forms.ValidationError(_(
                    "Editing this report would require a new data source. The limit is 5. "
                    "To continue, first delete all of the reports using a particular "
                    "data source (or the data source itself) and try again. "
                ))

            data_source_config_id = self._build_data_source()
            self.existing_report.config_id = data_source_config_id

        self.existing_report.aggregation_columns = self._report_aggregation_cols
        self.existing_report.columns = self._report_columns
        self.existing_report.filters = self._report_filters
        self.existing_report.configured_charts = self._report_charts
        self.existing_report.validate()
        self.existing_report.save()
        return self.existing_report
Code example #20
    def test_async_invalid_data(self):
        # re-fetch from DB to bust object caches
        self.config = DataSourceConfiguration.get(self.config.data_source_id)

        self.config.validations = [
            Validation.wrap({
                "name": "impossible_condition",
                "error_message": "This condition is impossible to satisfy",
                "expression": {
                    "type": "boolean_expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": "doesnt_exist"
                    },
                    "operator": "in",
                    "property_value": ["nonsense"]
                }
            })
        ]

        self.config.save()
        parent_id, child_id = uuid.uuid4().hex, uuid.uuid4().hex
        since = self.pillow.get_change_feed().get_latest_offsets()
        for i in range(3):
            form, cases = post_case_blocks([
                CaseBlock.deprecated_init(
                    create=i == 0,
                    case_id=parent_id,
                    case_name='parent-name',
                    case_type='bug',
                    update={'update-prop-parent': i},
                ).as_xml(),
                CaseBlock.deprecated_init(
                    create=i == 0,
                    case_id=child_id,
                    case_name='child-name',
                    case_type='bug-child',
                    index={'parent': ('bug', parent_id)},
                    update={'update-prop-child': i},
                ).as_xml(),
            ], domain=self.domain)
        self.pillow.process_changes(since=since, forever=False)

        # run async queue
        queue_async_indicators()
        self.assertEqual(InvalidUCRData.objects.count(), 1)
Code example #21
File: tasks.py Project: dimagi/commcare-hq
def _iteratively_build_table(config, resume_helper=None, in_place=False, limit=-1):
    resume_helper = resume_helper or DataSourceResumeHelper(config)
    indicator_config_id = config._id
    case_type_or_xmlns_list = config.get_case_type_or_xmlns_filter()
    completed_ct_xmlns = resume_helper.get_completed_case_type_or_xmlns()
    if completed_ct_xmlns:
        case_type_or_xmlns_list = [
            case_type_or_xmlns
            for case_type_or_xmlns in case_type_or_xmlns_list
            if case_type_or_xmlns not in completed_ct_xmlns
        ]

    for case_type_or_xmlns in case_type_or_xmlns_list:
        relevant_ids = []
        document_store = get_document_store_for_doc_type(
            config.domain, config.referenced_doc_type,
            case_type_or_xmlns=case_type_or_xmlns,
            load_source="build_indicators",
        )

        for i, relevant_id in enumerate(document_store.iter_document_ids()):
            if i >= limit > -1:
                break
            relevant_ids.append(relevant_id)
            if len(relevant_ids) >= ID_CHUNK_SIZE:
                _build_indicators(config, document_store, relevant_ids)
                relevant_ids = []

        if relevant_ids:
            _build_indicators(config, document_store, relevant_ids)

        resume_helper.add_completed_case_type_or_xmlns(case_type_or_xmlns)

    resume_helper.clear_resume_info()
    if not id_is_static(indicator_config_id):
        if in_place:
            config.meta.build.finished_in_place = True
        else:
            config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if in_place:
                if config.meta.build.initiated_in_place == current_config.meta.build.initiated_in_place:
                    current_config.meta.build.finished_in_place = True
            else:
                if config.meta.build.initiated == current_config.meta.build.initiated:
                    current_config.meta.build.finished = True
            current_config.save()
Code example #22
    def test_updating_report_data_source(self):
        """
        Test that changing the app or number column for a report results in an update to the data source next time
        the report is saved.
        """

        # Make report
        builder_form = ConfigureTableReportForm(
            "Test Report",
            self.app._id,
            "case",
            "some_case_type",
            existing_report=None,
            data={
                'group_by': 'closed',
                'user_filters': '[]',
                'default_filters': '[]',
                'columns': '[{"property": "closed", "display_text": "closed", "calculation": "Count per Choice"}]',
            }
        )
        self.assertTrue(builder_form.is_valid())
        report = builder_form.create_report()

        self.assertEqual(report.config.configured_indicators[0]['datatype'], "string")

        # Make an edit to the first report builder report
        builder_form = ConfigureTableReportForm(
            "Test Report",
            self.app._id,
            "case",
            "some_case_type",
            existing_report=report,
            data={
                'group_by': 'user_id',
                'user_filters': '[]',
                'default_filters': '[]',
                # Note that a "Sum" calculation on the closed case property isn't very sensical, but doing it so
                # that I can have a numeric calculation without having to create real case properties for this case
                #  type.
                'columns': '[{"property": "closed", "display_text": "closed", "calculation": "Sum"}]',
            }
        )
        self.assertTrue(builder_form.is_valid())
        builder_form.update_report()

        # reload report data source, because report.config is memoized
        data_source = DataSourceConfiguration.get(report.config._id)
        # The closed property indicator should now be decimal type because the user indicated that it was numeric
        # by giving the column the "Sum" aggregation.
        self.assertEqual(data_source.configured_indicators[0]['datatype'], "decimal")
Code example #23
File: tasks.py Project: bradmerlin/commcare-hq
def rebuild_indicators(indicator_config_id):
    config = DataSourceConfiguration.get(indicator_config_id)
    adapter = IndicatorSqlAdapter(get_engine(), config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type,
                               database=couchdb)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        if config.filter.filter(doc):
            try:
                adapter.save(doc)
            except DataError as e:
                logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))
Code example #24
    def test_data_source_columns(self):
        """
        Report Builder should create a data source that includes columns for all possible aggregations, so that if
        the user switches between a list report and a summary report the data source has all the required columns

        (FB 268655)
        """
        builder_form = ConfigureListReportForm(
            self.domain,
            "My Report",
            self.app._id,
            "form",
            self.form.unique_id,
            data={
                'user_filters': '[]',
                'default_filters': '[]',
                'columns': """[
                    {"property": "/data/first_name", "display_text": "first name"},
                    {"property": "/data/last_name", "display_text": "last name"},
                    {"property": "/data/children", "display_text": "children"}
                ]""",
            })
        self.assertTrue(builder_form.is_valid())
        with patch('corehq.apps.userreports.tasks.delete_data_source_task'):
            data_source_config_id = builder_form.create_temp_data_source_if_necessary(
                '*****@*****.**')
        data_source = DataSourceConfiguration.get(data_source_config_id)
        indicators = sorted([(ind['column_id'], ind['type'])
                             for ind in data_source.configured_indicators])
        expected_indicators = [
            ('count', 'boolean'),
            ('data_children_25bd0e0d', 'expression'),  # "children" should have 2 columns because it is
            ('data_children_25bd0e0d_decimal', 'expression'),  # numeric
            ('data_dob_b6293169', 'expression'),
            ('data_first_name_ac8c51a7', 'expression'),
            ('data_last_name_ce36e9e1', 'expression'),
            ('data_state_6e36b993', 'choice_list'),
            ('data_state_6e36b993', 'expression'),
            ('deviceID_a7307e7d', 'expression'),
            ('timeEnd_09f40526', 'expression'),
            ('timeStart_c5a1ba73', 'expression'),
            ('userID_41e1d44e', 'expression'),
            ('username_ea02198f', 'expression'),
        ]
        self.assertEqual(indicators, expected_indicators)
Code example #25
def migrate_linked_reports(upstream_domain=None):
    logger.setLevel(logging.INFO)
    if upstream_domain:
        domain_links = DomainLink.all_objects.filter(
            master_domain=upstream_domain)
    else:
        domain_links = DomainLink.all_objects.all()

    num_of_failed_attempts = 0
    for domain_link in domain_links:
        reports = get_report_configs_for_domain(domain_link.linked_domain)
        for report in reports:
            if report.report_meta.master_id and not report.config.meta.build.app_id:
                upstream_report = ReportConfiguration.get(
                    report.report_meta.master_id)
                upstream_datasource = DataSourceConfiguration.get(
                    upstream_report.config_id)
                downstream_app_id = get_downstream_app_id(
                    domain_link.linked_domain,
                    upstream_datasource.meta.build.app_id,
                )
                if not downstream_app_id:
                    # just as a backup in case upstream_app_id is not set but family_id is
                    downstream_app_id = get_downstream_app_id(
                        domain_link.linked_domain,
                        upstream_datasource.meta.build.app_id,
                        use_upstream_app_id=False)
                    if downstream_app_id:
                        logger.info(
                            f"Needed to use family_id to find downstream app {downstream_app_id}"
                        )

                if not downstream_app_id:
                    logger.warning(
                        f"Could not find downstream_app_id for upstream app"
                        f" {upstream_datasource.meta.build.app_id} "
                        f"in downstream domain {domain_link.linked_domain}")
                    num_of_failed_attempts += 1

                report.config.meta.build.app_id = downstream_app_id
                report.config.save()
    logger.info(
        f"Completed linked report migration with {num_of_failed_attempts} failed attempts"
    )
    return num_of_failed_attempts
Code example #26
def create_linked_ucr(domain_link, report_config_id):
    if domain_link.is_remote:
        remote_configs = remote_get_ucr_config(domain_link, report_config_id)
        datasource = remote_configs["datasource"]
        report_config = remote_configs["report"]
    else:
        report_config = ReportConfiguration.get(report_config_id)
        datasource = DataSourceConfiguration.get(report_config.config_id)

    # grab the linked app this linked report references
    try:
        downstream_app_id = get_downstream_app_id(domain_link.linked_domain, datasource.meta.build.app_id)
    except MultipleDownstreamAppsError:
        raise DomainLinkError(_("This report cannot be linked because it references an app that has multiple "
                                "downstream apps."))

    new_datasource = _get_or_create_datasource_link(domain_link, datasource, downstream_app_id)
    new_report = _get_or_create_report_link(domain_link, report_config, new_datasource)
    return LinkedUCRInfo(datasource=new_datasource, report=new_report)
Code example #27
File: tasks.py Project: tlwakwella/commcare-hq
def _iteratively_build_table(config, last_id=None):
    couchdb = _get_db(config.referenced_doc_type)
    redis_client = get_redis_client().client.get_client()
    redis_key = _get_redis_key_for_config(config)
    indicator_config_id = config._id

    start_key = None
    if last_id:
        last_doc = _DOC_TYPE_MAPPING[config.referenced_doc_type].get(last_id)
        start_key = [config.domain, config.referenced_doc_type]
        if config.referenced_doc_type in _DATE_MAP.keys():
            date = json_format_datetime(last_doc[_DATE_MAP[config.referenced_doc_type]])
            start_key.append(date)

    relevant_ids = []
    for relevant_id in iterate_doc_ids_in_domain_by_type(
            config.domain,
            config.referenced_doc_type,
            chunk_size=CHUNK_SIZE,
            database=couchdb,
            startkey=start_key,
            startkey_docid=last_id):
        relevant_ids.append(relevant_id)
        if len(relevant_ids) >= CHUNK_SIZE:
            redis_client.rpush(redis_key, *relevant_ids)
            _build_indicators(indicator_config_id, relevant_ids)
            relevant_ids = []

    if relevant_ids:
        redis_client.rpush(redis_key, *relevant_ids)
        _build_indicators(indicator_config_id, relevant_ids)

    if not is_static(indicator_config_id):
        redis_client.delete(redis_key)
        config.meta.build.finished = True
        try:
            config.save()
        except ResourceConflict:
            current_config = DataSourceConfiguration.get(config._id)
            # check that a new build has not yet started
            if config.meta.build.initiated == current_config.meta.build.initiated:
                current_config.meta.build.finished = True
                current_config.save()
Code example #28
    def test_skip_destructive_rebuild(self):
        self.config = self._get_config('add_non_nullable_col')
        self.config.disable_destructive_rebuild = True
        self.config.save()

        get_case_pillow(ucr_configs=[self.config])
        self.adapter = get_indicator_adapter(self.config)
        self.engine = self.adapter.engine

        # assert new date isn't in the config
        insp = reflection.Inspector.from_engine(self.engine)
        table_name = get_table_name(self.config.domain, self.config.table_id)
        self.assertEqual(
            len([
                c for c in insp.get_columns(table_name)
                if c['name'] == 'new_date'
            ]), 0)

        # add the column to the config
        self.config.configured_indicators.append({
            "column_id": "new_date",
            "type": "raw",
            "display_name": "new_date opened",
            "datatype": "datetime",
            "property_name": "other_opened_on",
            "is_nullable": False
        })
        self.config.save()

        # re-fetch from DB to bust object caches
        self.config = DataSourceConfiguration.get(self.config.data_source_id)

        # bootstrap to trigger rebuild
        get_case_pillow(ucr_configs=[self.config])

        logs = DataSourceActionLog.objects.filter(
            indicator_config_id=self.config.data_source_id,
            skip_destructive=True)
        self.assertEqual(1, len(logs))
        self.assertEqual(logs[0].migration_diffs, [{
            'type': 'add_column',
            'item_name': 'new_date'
        }])
Code example #29
File: tasks.py Project: jmaina/commcare-hq
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(CustomDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = CustomDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)

    adapter = IndicatorSqlAdapter(get_engine(), config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type,
                               database=couchdb)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))
    adapter.engine.dispose()
Code example #30
File: tasks.py Project: nnestle/commcare-hq
def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(StaticDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = StaticDataSourceConfiguration.by_id(indicator_config_id)
        rev = 'static'
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        rev = config._rev
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)

    couchdb = _get_db(config.referenced_doc_type)
    client = get_redis_client().client.get_client()
    redis_key = 'ucr_queue-{}:{}'.format(indicator_config_id, rev)

    if len(client.smembers(redis_key)) > 0:
        relevant_ids = client.smembers(redis_key)
    else:
        adapter.rebuild_table()
        relevant_ids = get_doc_ids_in_domain_by_type(
            config.domain, config.referenced_doc_type, database=couchdb)
        if relevant_ids:
            client.sadd(redis_key, *relevant_ids)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
            client.srem(redis_key, doc.get('_id'))
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static:
        client.delete(redis_key)
        config.meta.build.finished = True
        config.save()
Code example #31
File: tasks.py Project: philipkaare/commcare-hq
def _get_config_by_id(indicator_config_id):
    if is_static(indicator_config_id):
        return StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        return DataSourceConfiguration.get(indicator_config_id)
Code example #32
File: data_source.py Project: aristide/commcare-hq
    def config(self):
        if self._config is None:
            self._config = DataSourceConfiguration.get(self._config_id)
        return self._config
Code example #33
def _get_config_by_id(indicator_config_id):
    if id_is_static(indicator_config_id):
        return StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        return DataSourceConfiguration.get(indicator_config_id)
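
Note: the is_static / id_is_static helpers used throughout these snippets are not included in this listing. Judging from the prefix checks in examples #2 and #9 (and the StaticDataSourceConfiguration variant in example #30), a plausible sketch, offered as an assumption rather than the actual implementation, is:

def id_is_static(indicator_config_id):
    # Assumption: static data sources carry a reserved id prefix, mirroring the
    # startswith(_datasource_id_prefix) checks in examples #2, #9 and #30.
    if indicator_config_id is None:
        return False
    return indicator_config_id.startswith(
        StaticDataSourceConfiguration._datasource_id_prefix)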
Code example #34
    def tearDown(self):
        self.config = DataSourceConfiguration.get(self.config.data_source_id)
        self.config.delete()
        self.adapter.drop_table()
        delete_all_cases()
        delete_all_xforms()