def test_update_invalid_coordinate_space_value_format_validation(self):
        indicator_location_data = IndicatorLocationData.objects.filter(
            level_reported=3, num_disaggregation=3).first()

        update_data = IndicatorLocationDataUpdateSerializer(
            indicator_location_data).data

        level_reported_3_key = None
        tuple_disaggregation = get_cast_dictionary_keys_as_tuple(
            update_data['disaggregation'])

        for key in tuple_disaggregation:
            if len(key) == 3:
                level_reported_3_key = key
                break

        update_data['disaggregation'][str(level_reported_3_key)] = {}

        url = reverse('indicator-location-data-entries-put-api')
        response = self.client.put(url, update_data, format='json')

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn(
            "coordinate space value does not " +
            "have correct value key structure: c, d, v",
            response.data['non_field_errors'][0])

    def test_update_invalid_coordinate_space_key_validation(self):
        indicator_location_data = IndicatorLocationData.objects.filter(
            level_reported=3, num_disaggregation=3).first()

        next_disaggregation_value_id = DisaggregationValue.objects.count() + 1

        update_data = IndicatorLocationDataUpdateSerializer(
            indicator_location_data).data

        level_reported_3_key = None
        tuple_disaggregation = get_cast_dictionary_keys_as_tuple(
            update_data['disaggregation'])

        for key in tuple_disaggregation:
            if len(key) == 3:
                level_reported_3_key = key
                break

        del update_data['disaggregation'][str(level_reported_3_key)]

        level_reported_3_key = list(level_reported_3_key[:-1])
        level_reported_3_key.append(next_disaggregation_value_id)
        update_data['disaggregation'][str(tuple(level_reported_3_key))] = {}

        url = reverse('indicator-location-data-entries-put-api')
        response = self.client.put(url, update_data, format='json')

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn(
            "coordinate space does not " +
            "belong to disaggregation value id list",
            response.data['non_field_errors'][0])

    def test_update_level_reported_3(self):
        indicator_location_data = IndicatorLocationData.objects.filter(
            level_reported=3, num_disaggregation=3).first()

        update_data = IndicatorLocationDataUpdateSerializer(
            indicator_location_data).data

        level_reported_3_key = None
        tuple_disaggregation = get_cast_dictionary_keys_as_tuple(
            update_data['disaggregation'])

        for key in tuple_disaggregation:
            if len(key) == 3:
                level_reported_3_key = key
                break

        correct_total = update_data['disaggregation']['()']['v'] \
            - update_data['disaggregation'][str(level_reported_3_key)]['v']
        update_data['disaggregation'][str(level_reported_3_key)]['v'] = 0

        url = reverse('indicator-location-data-entries-put-api')
        response = self.client.put(url, update_data, format='json')

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['disaggregation']['()']['v'],
                         correct_total)
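Before the next example, here is a minimal, made-up sketch of the disaggregation payload shape these three tests manipulate; the ids inside the keys and the numbers are illustrative, not taken from the fixtures above.

# Hypothetical payload shape, for orientation only. Keys are stringified
# tuples of DisaggregationValue ids (the "coordinate space"); every entry
# must carry the c (calculated), d (denominator) and v (value) fields the
# serializer validates.
example_disaggregation = {
    '()': {'c': 0, 'd': 1, 'v': 25},         # overall total
    '(1,)': {'c': 0, 'd': 1, 'v': 25},       # level-1 subtotal
    '(1, 2)': {'c': 0, 'd': 1, 'v': 25},     # level-2 subtotal
    '(1, 2, 3)': {'c': 0, 'd': 1, 'v': 25},  # level-3 leaf entry
}

# Emptying a leaf (first test) removes the required c/d/v structure, and
# swapping in an id no DisaggregationValue has (second test) breaks the
# coordinate-space check; both should yield HTTP 400.
example_disaggregation['(1, 2, 3)'] = {}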
Example #4
    def post_process(indicator_location_data):
        vt = ValueType.VALUE
        dt = ValueType.DENOMINATOR
        ct = ValueType.CALCULATED

        # Copy of indicator_location_data.disaggregation with the keys cast
        # from their original string form to tuples.
        disagg = get_cast_dictionary_keys_as_tuple(indicator_location_data.disaggregation)

        # indicator_location_data.level_reported cannot be trusted to reflect
        # the structure correctly, so derive it from the keys instead:
        level_reported = 0
        for k in disagg.keys():
            level_reported = max(level_reported, len(k))

        calc_data = {}
        for k in disagg.keys():
            if len(k) == level_reported:
                calc_data[k] = {
                    vt: disagg[k][vt],
                    ct: disagg[k][vt],
                    dt: 1
                }
                for sk in get_all_subkeys(k):
                    if sk not in calc_data:
                        calc_data[sk] = get_zero_dict("SUM")
                    calc_data[sk] = calculate_sum(calc_data[k], calc_data[sk])

        disaggregation = get_cast_dictionary_keys_as_string(calc_data)

        indicator_location_data.disaggregation = disaggregation
        indicator_location_data.save()

        indicator_report = indicator_location_data.indicator_report
        QuantityIndicatorDisaggregator.calculate_indicator_report_total(indicator_report)
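A standalone, hand-worked sketch of the subkey aggregation above. The keys and numbers are invented, and calculate_sum / get_zero_dict are assumed to behave like the plain field-wise sum shown here, with d kept at 1 as in the quantity variant further below.

from itertools import combinations

# Two invented level-3 leaf entries.
leaf_entries = {
    (1, 3, 5): {'v': 10, 'c': 10, 'd': 1},
    (1, 4, 5): {'v': 20, 'c': 20, 'd': 1},
}

def all_subkeys(key):
    # Every proper sub-combination of the key, including the empty tuple.
    return [c for r in range(len(key)) for c in combinations(key, r)]

calc = {}
for key, entry in leaf_entries.items():
    calc[key] = dict(entry)
    for sub in all_subkeys(key):
        calc.setdefault(sub, {'v': 0, 'c': 0, 'd': 1})
        calc[sub]['v'] += entry['v']
        calc[sub]['c'] += entry['c']

print(calc[()])  # {'v': 30, 'c': 30, 'd': 1} -- the overall total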
Example #5
    def test_post_process_location_calc_with_zero_value_entry(self):
        unit_type = IndicatorBlueprint.PERCENTAGE
        calc_type = IndicatorBlueprint.SUM
        display_type = IndicatorBlueprint.RATIO

        blueprint = RatioTypeIndicatorBlueprintFactory(
            unit=unit_type,
            calculation_formula_across_locations=calc_type,
            calculation_formula_across_periods=calc_type,
            display_type=display_type,
        )
        partneractivity_reportable = RatioReportableToPartnerActivityProjectContextFactory(
            content_object=self.project_context, blueprint=blueprint)

        partneractivity_reportable.disaggregations.clear()

        add_disaggregations_to_reportable(
            partneractivity_reportable,
            disaggregation_targets=["age", "gender", "height"])

        LocationWithReportableLocationGoalFactory(
            location=self.loc1,
            reportable=partneractivity_reportable,
        )

        ir = ClusterIndicatorReportFactory(
            reportable=partneractivity_reportable,
            report_status=INDICATOR_REPORT_STATUS.due,
        )

        # Creating Level-3 disaggregation location data for all locations
        generate_3_num_disagg_data(partneractivity_reportable,
                                   indicator_type="ratio")

        loc_data1 = ir.indicator_location_data.first()

        # Zero out the v, d and c fields of one level-3 entry on location data 1
        level_reported_3_key = None
        tuple_disaggregation = get_cast_dictionary_keys_as_tuple(
            loc_data1.disaggregation)

        for key in tuple_disaggregation:
            if len(key) == 3:
                level_reported_3_key = key
                break

        validated_data = copy.deepcopy(loc_data1.disaggregation)

        old_totals = validated_data['()']
        loc_data1.disaggregation[str(level_reported_3_key)]['d'] = 0
        loc_data1.disaggregation[str(level_reported_3_key)]['v'] = 0
        loc_data1.disaggregation[str(level_reported_3_key)]['c'] = 0
        loc_data1.save()

        RatioIndicatorDisaggregator.post_process(loc_data1)

        self.assertNotEqual(old_totals['c'],
                            loc_data1.disaggregation['()']['c'])
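A back-of-the-envelope check of what the final assertion verifies, with invented numbers; it assumes the ratio total recomputes c as summed v over summed d.

def overall_c(leaves):
    # Recompute the '()' calculated value from a list of leaf entries.
    v = sum(leaf['v'] for leaf in leaves)
    d = sum(leaf['d'] for leaf in leaves)
    return v / d if d else 0

before = [{'v': 10, 'd': 20}, {'v': 30, 'd': 30}]   # c = 40 / 50 = 0.8
after = [{'v': 10, 'd': 20}, {'v': 0, 'd': 0}]      # c = 10 / 20 = 0.5

assert overall_c(before) != overall_c(after)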
Example #6
    def get_disaggregation(self, obj):
        ordered_dict = get_cast_dictionary_keys_as_tuple(obj.disaggregation)

        ordered_dict = get_sorted_ordered_dict_by_keys(
            ordered_dict, reverse=True)

        ordered_dict = get_cast_dictionary_keys_as_string(ordered_dict)

        return ordered_dict
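An illustrative round trip for the serializer method above, assuming the helpers cast string keys to tuples, sort by key and cast back; the stored data is invented.

from ast import literal_eval
from collections import OrderedDict

stored = {'(1, 2)': {'v': 5}, '()': {'v': 9}, '(1,)': {'v': 4}}

as_tuples = {literal_eval(k): v for k, v in stored.items()}
ordered = OrderedDict(sorted(as_tuples.items(), reverse=True))
as_strings = OrderedDict((str(k), v) for k, v in ordered.items())

print(list(as_strings))  # ['(1, 2)', '(1,)', '()']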
Example #7
    def test_get_cast_dictionary_keys_as_tuple(self):
        string_dict = get_cast_dictionary_keys_as_string(self.entry_dict)

        converted_dict = get_cast_dictionary_keys_as_tuple(string_dict)

        keys = converted_dict.keys()

        for key in keys:
            self.assertIsInstance(key, tuple)
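One plausible shape for the two key-casting helpers exercised above (an assumption, not the project's actual implementation): string keys such as '(1, 2)' round-trip to tuples via ast.literal_eval and back via str().

import ast

def cast_keys_as_tuple(dictionary):
    # '(1, 2)' -> (1, 2); '()' -> ()
    return {ast.literal_eval(key): value for key, value in dictionary.items()}

def cast_keys_as_string(dictionary):
    # (1, 2) -> '(1, 2)'; () -> '()'
    return {str(key): value for key, value in dictionary.items()}

sample = {'()': {'v': 1}, '(3, 4)': {'v': 2}}
assert cast_keys_as_string(cast_keys_as_tuple(sample)) == sample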
Example #8
    def post_process(indicator_location_data):
        """
        post_process will perform the followings:
        1. Calculate SUM of all v and d for all level_reported.
        2. Calculate c value from v and d for all level_reported entries.
        """
        vt = ValueType.VALUE
        dt = ValueType.DENOMINATOR
        ct = ValueType.CALCULATED

        disagg = get_cast_dictionary_keys_as_tuple(indicator_location_data.disaggregation)
        # indicator_location_data.level_reported cannot be trusted to reflect
        # the structure correctly, so derive it from the keys instead:
        level_reported = 0
        for k in disagg.keys():
            level_reported = max(level_reported, len(k))

        calc_data = {}
        for k in disagg.keys():
            if len(k) == level_reported:
                calc_data[k] = {
                    vt: disagg[k][vt],
                    ct: disagg[k][vt] / float(disagg[k][dt]) if disagg[k][dt] else 0,
                    dt: disagg[k][dt]
                }
                for sk in get_all_subkeys(k):
                    if sk not in calc_data:
                        calc_data[sk] = get_zero_dict("RATIO")
                    calc_data[sk] = calculate_sum(calc_data[k], calc_data[sk], indicator_type="RATIO")

        indicator_location_data.disaggregation = get_cast_dictionary_keys_as_string(calc_data)
        indicator_location_data.save()

        indicator_report = indicator_location_data.indicator_report

        RatioIndicatorDisaggregator.calculate_indicator_report_total(
            indicator_report)
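A hand-worked instance of the ratio aggregation above, with invented keys and numbers; calculate_sum is assumed to add v and d field-wise for RATIO indicators, with c recomputed as v / d.

leaves = {
    (1, 3): {'v': 30, 'd': 60},   # 50% in this cell
    (2, 3): {'v': 10, 'd': 40},   # 25% in this cell
}

total_v = sum(entry['v'] for entry in leaves.values())   # 40
total_d = sum(entry['d'] for entry in leaves.values())   # 100
total_c = total_v / total_d if total_d else 0            # 0.4

print({'v': total_v, 'd': total_d, 'c': total_c})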
    def post_process(indicator_location_data):
        level_reported = indicator_location_data.level_reported

        ordered_dict = get_cast_dictionary_keys_as_tuple(
            indicator_location_data.disaggregation)

        ordered_dict_keys = list(ordered_dict.keys())

        if level_reported == 0:
            ordered_dict[tuple()]["d"] = 1
            ordered_dict[tuple()]["c"] = ordered_dict[tuple()]["v"]

        else:
            # Reset all subtotals
            for key in ordered_dict_keys:
                if len(key) == level_reported:
                    ordered_dict[key]["d"] = 1
                    ordered_dict[key]["c"] = ordered_dict[key]["v"]

                    packed_key = [(item,) for item in key]
                    subkey_combinations = generate_data_combination_entries(
                        packed_key,
                        entries_only=True,
                        key_type=tuple,
                        r=level_reported - 1
                    )

                    for subkey in subkey_combinations:
                        ordered_dict[subkey] = {
                            'c': 0,
                            'd': 1,
                            'v': 0,
                        }

            ordered_dict_keys = list(ordered_dict.keys())

            # Calculating subtotals
            for key in ordered_dict_keys:
                if len(key) == level_reported:
                    packed_key = [(item,) for item in key]
                    subkey_combinations = generate_data_combination_entries(
                        packed_key,
                        entries_only=True,
                        key_type=tuple,
                        r=level_reported - 1
                    )

                    # It is always SUM at IndicatorLocationData level
                    for subkey in subkey_combinations:
                        ordered_dict[subkey]["v"] += \
                            ordered_dict[key]["v"]

                        ordered_dict[subkey]["c"] += \
                            ordered_dict[key]["c"]

        ordered_dict = get_cast_dictionary_keys_as_string(ordered_dict)

        indicator_location_data.disaggregation = ordered_dict
        indicator_location_data.save()

        # Reset the IndicatorReport total; d is always 1 for quantity indicators
        ir_total = {
            'c': 0,
            'd': 1,
            'v': 0,
        }

        indicator_report = indicator_location_data.indicator_report

        # IndicatorReport total calculation
        if indicator_report.calculation_formula_across_locations == IndicatorBlueprint.MAX:
            max_total_loc = max(
                indicator_report.indicator_location_data.all(),
                key=lambda item: item.disaggregation['()']['v'])

            ir_total = max_total_loc.disaggregation['()']

        else:
            for loc_data in indicator_report.indicator_location_data.all():
                loc_total = loc_data.disaggregation['()']

                ir_total['v'] += loc_total['v']
                ir_total['c'] += loc_total['c']

        if indicator_report.calculation_formula_across_locations == IndicatorBlueprint.AVG:
            loc_count = indicator_report.indicator_location_data.count()

            ir_total['v'] = ir_total['v'] / (loc_count * 1.0)
            ir_total['c'] = ir_total['c'] / (loc_count * 1.0)

        indicator_report.total = ir_total
        indicator_report.save()
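A condensed illustration of the three across-location formulas handled above (SUM is the default branch); the per-location '()' totals are invented.

location_totals = [
    {'c': 10, 'd': 1, 'v': 10},
    {'c': 30, 'd': 1, 'v': 30},
]

# SUM (default branch): add v and c across locations.
sum_total = {
    'c': sum(t['c'] for t in location_totals),
    'd': 1,
    'v': sum(t['v'] for t in location_totals),
}                                                        # v == 40

# MAX: take the whole '()' total of the location with the largest v.
max_total = max(location_totals, key=lambda t: t['v'])   # v == 30

# AVG: divide the summed values by the number of locations.
avg_total = {
    'c': sum_total['c'] / len(location_totals),
    'd': 1,
    'v': sum_total['v'] / len(location_totals),
}                                                        # v == 20.0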
    def post_process(indicator_location_data):
        """
        post_process will perform the followings:
        1. Calculate SUM of all v and d for all level_reported.
        2. Calculate c value from v and d for all level_reported entries.
        """
        level_reported = indicator_location_data.level_reported

        ordered_dict = get_cast_dictionary_keys_as_tuple(
            indicator_location_data.disaggregation)

        ordered_dict_keys = list(ordered_dict.keys())

        if level_reported != 0:
            # Reset all subtotals
            for key in ordered_dict_keys:
                if len(key) == level_reported:
                    packed_key = [(item,) for item in key]
                    subkey_combinations = generate_data_combination_entries(
                        packed_key,
                        entries_only=True,
                        key_type=tuple,
                        r=level_reported - 1
                    )

                    for subkey in subkey_combinations:
                        ordered_dict[subkey] = {
                            'c': 0,
                            'd': 0,
                            'v': 0,
                        }

            # Calculating subtotals
            for key in ordered_dict_keys:
                if len(key) == level_reported:
                    packed_key = [(item,) for item in key]
                    subkey_combinations = generate_data_combination_entries(
                        packed_key,
                        entries_only=True,
                        key_type=tuple,
                        r=level_reported - 1
                    )

                    # It is always SUM at IndicatorLocationData level
                    for subkey in subkey_combinations:
                        ordered_dict[subkey]["v"] += \
                            ordered_dict[key]["v"]

                        ordered_dict[subkey]["d"] += \
                            ordered_dict[key]["d"]

        # Calculating all level_reported N c values
        for key in ordered_dict_keys:
            if ordered_dict[key]["v"] == 0 and ordered_dict[key]["d"] == 0:
                ordered_dict[key]["c"] = 0
            elif ordered_dict[key]["d"] == 0:
                raise Exception(
                    'Denominator is 0 when numerator is not for {}'.format(key))
            else:
                ordered_dict[key]["c"] = ordered_dict[key]["v"] / \
                    (ordered_dict[key]["d"] * 1.0)

        ordered_dict = get_cast_dictionary_keys_as_string(ordered_dict)

        indicator_location_data.disaggregation = ordered_dict
        indicator_location_data.save()

        # Reset the IndicatorReport total
        ir_total = {
            'c': 0,
            'd': 0,
            'v': 0,
        }

        indicator_report = indicator_location_data.indicator_report

        for loc_data in indicator_report.indicator_location_data.all():
            loc_total = loc_data.disaggregation['()']

            ir_total['v'] += loc_total['v']
            ir_total['d'] += loc_total['d']

        ir_total["c"] = ir_total["v"] / (ir_total["d"] * 1.0)

        indicator_report.total = ir_total
        indicator_report.save()
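The report-level ratio total above, worked by hand with invented per-location '()' totals: v and d are summed across locations and c is recomputed as v / d.

location_totals = [
    {'v': 30, 'd': 60},
    {'v': 10, 'd': 40},
]

ir_total = {
    'v': sum(t['v'] for t in location_totals),   # 40
    'd': sum(t['d'] for t in location_totals),   # 100
}
ir_total['c'] = ir_total['v'] / (ir_total['d'] * 1.0)    # 0.4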