Example #1
    def loadDVMaxMinInAggregation(
        self,
        my_measure: json,
        m_stop_date: datetime,
        agg_decas: list,
        m_agg_j: json,
        delta_values: json,
        dv_next: json,
        trace_flag: bool = False,
        b_use_rate: bool = False,
    ):
        # if m_stop_date.minute > 0 or m_stop_date.second > 1:
        #     return
        if str(agg_decas[1].getLevel()).lower()[0] == 'h':
            # get the max/min of the value from the agg_hour
            target_key = my_measure['target_key']
            src_key = my_measure['src_key']
            agg_j = agg_decas[0].data.j
            for maxmin_suffix in ['_max', '_min']:
                if (src_key + maxmin_suffix) in agg_j:
                    delta_values[target_key + maxmin_suffix] = my_measure['dataType'](agg_j[src_key + maxmin_suffix])
                    delta_values[target_key + maxmin_suffix + '_time'] = agg_j[src_key + maxmin_suffix + '_time']
                    if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                        if (src_key + maxmin_suffix + '_dir') in agg_j:
                            delta_values[target_key + maxmin_suffix + '_dir'] = float(agg_j[src_key + maxmin_suffix + '_dir'])

        super().loadDVMaxMinInAggregation(my_measure, m_stop_date, agg_decas, m_agg_j, delta_values, dv_next, trace_flag)
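
The _max/_min key convention above simply mirrors hourly extremes from the aggregation json into delta_values. A minimal sketch of that convention on plain dicts, with illustrative names only (copy_maxmin is not part of the project):

    # Minimal sketch of the src_key -> target_key max/min copy shown above,
    # using plain dicts and float values (illustrative, not the project's API).
    def copy_maxmin(agg_j: dict, delta_values: dict, src_key: str, target_key: str):
        for suffix in ['_max', '_min']:
            if (src_key + suffix) in agg_j:
                delta_values[target_key + suffix] = float(agg_j[src_key + suffix])
                delta_values[target_key + suffix + '_time'] = agg_j[src_key + suffix + '_time']

    agg_hour = {'wind_max': 12.3, 'wind_max_time': '2021-06-01T14:10:00'}
    dv = {}
    copy_maxmin(agg_hour, dv, 'wind', 'wind')
    # dv -> {'wind_max': 12.3, 'wind_max_time': '2021-06-01T14:10:00'}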
Example #2
    def loadDataInObs(
        self,
        my_measure: json,
        obs_meteor: ObsMeteor,
        target_key: str,
        delta_values: json,
        my_values: json,
        trace_flag: bool,
    ):
        """
            loadObservationDatarow

            load Observation dataRow from json measures

            load delta_values from json measures
        """
        obs_j = obs_meteor.data.j

        my_value_avg = my_values.get(target_key + '_a')
        my_value_instant = my_values.get(target_key + '_i')
        my_value_dir = my_values.get(target_key + '_di')
        tmp_duration = my_values.get(target_key + '_du')
        if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm):
            tmp_duration = 60
        if my_value_avg is None and my_value_instant is None:
            # no data suitable for us
            return
        tmp_sum = my_value_avg if my_value_avg is not None else my_value_instant

        if my_value_avg is None:
            my_value_avg = my_value_instant

        # get current values from our aggregations
        tmp_sum_old = tmp_duration_old = 0
        if obs_j.get(target_key) is not None:
            tmp_sum_old = obs_j[target_key]
            if tmp_sum == tmp_sum_old:
                # no change on avg computation
                return
            tmp_duration_old = obs_j.get(target_key + '_duration')
            if tmp_duration_old is None:
                tmp_duration_old = obs_meteor.data.duration
            if tmp_duration != tmp_duration_old:
                raise Exception('loadDataInObs', 'duration mismatch for ' + target_key + ': in obs: ' + str(tmp_duration_old) + ', in json: ' + str(tmp_duration))
            delta_values[target_key + '_sum_old'] = tmp_sum_old
            delta_values[target_key + '_duration_old'] = tmp_duration_old

        # save data in dv
        delta_values[target_key + '_sum'] = tmp_sum
        delta_values[target_key + '_duration'] = tmp_duration

        # save data in obs
        if my_value_instant is not None:
            obs_j[target_key] = my_value_instant

        if my_value_dir is not None:
            obs_j[target_key + '_dir'] = my_value_dir
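
The _sum/_duration pairs written to delta_values here, together with the _old counterparts stored on updates, are what allow higher aggregation levels to adjust their totals incrementally instead of rescanning observations. A toy illustration of that update arithmetic (plain dicts, ad-hoc weighting; Example #4 below shows the duration-weighted variant):

    # Toy illustration: an aggregate keeps sum and duration; an update sends
    # (new - old) deltas so the mean stays exact without a full recompute.
    agg = {'temp_sum': 15.0 * 60, 'temp_duration': 60}   # one hour at 15.0

    # the observation is corrected from 15.0 to 17.0 over the same 60 minutes
    delta_sum = 17.0 * 60 - 15.0 * 60        # tmp_sum - tmp_sum_old
    delta_duration = 60 - 60                 # tmp_duration - tmp_duration_old

    agg['temp_sum'] += delta_sum
    agg['temp_duration'] += delta_duration
    print(agg['temp_sum'] / agg['temp_duration'])   # -> 17.0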
Example #3
    def loadInObs(self, poste_metier, my_measure: json, json_file_data: json, measure_idx: int, m_agg_j: json, obs_meteor: ObsMeteor, delta_values: json, trace_flag: bool = False):
        """
            processObservation

            load json data in Observation table

            load max/min

            return the delta_values to be added in all aggregations

            some methods are implemented here, some in the inherited class
        """

        # load field if defined in json
        src_key = my_measure['src_key']
        target_key = my_measure['target_key']

        # get exclusion, and return if value is nullified
        exclusion = poste_metier.exclusion(my_measure['type_i'])
        # to check later...
        # if shouldNullify(exclusion, src_key) is True:
        #     return

        my_values = {}

        self.loadValuesFromCurrent(my_measure, json_file_data, measure_idx, src_key, target_key, exclusion, my_values, obs_meteor.data.stop_dat, trace_flag)

        if len(my_values) == 0 and len(m_agg_j) == 0:
            return

        if isFlagged(my_measure['special'], MeasureProcessingBitMask.NotAllowedInCurrent):
            return

        # load Json data in dv

        # update duration & agg_start_dat in obs if needed
        if obs_meteor.data.duration == 0 and len(my_values) > 1:
            tmp_duration = my_values.get(target_key + '_du')
            obs_meteor.data.duration = tmp_duration
            # compute our agg_h.start_dat for faster retrieval of observation for a given agg_h.start_dat
            obs_meteor.data.agg_start_dat = calcAggDate('H', obs_meteor.data.stop_dat, tmp_duration, True)

            # double check that the duration are compatible
            if obs_meteor.data.duration != tmp_duration:
                raise Exception('loadObsDatarow', 'incompatible durations -> in table obs: ' + str(obs_meteor.data.duration) + ', in json: ' + str(tmp_duration))

        # load data from dv to obs
        self.loadDataInObs(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)

        # check max/min values that need to be regenerated later
        self.checkMaxMinToRegenerate(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)

        # load Max/Min in obs, and in dv
        self.loadMaxMinInObs(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)
        return
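
The agg_start_dat computed above anchors each observation to the start of its hourly aggregation bucket. calcAggDate's real semantics are not shown in this excerpt; a hypothetical equivalent for the 'H' level might look like this:

    # Hypothetical stand-in for calcAggDate('H', stop_dat, duration, True):
    # subtract the measure duration, then truncate to the hour.
    from datetime import datetime, timedelta

    def hour_bucket_start(stop_dat: datetime, duration_min: int) -> datetime:
        start = stop_dat - timedelta(minutes=duration_min)
        return start.replace(minute=0, second=0, microsecond=0)

    print(hour_bucket_start(datetime(2021, 6, 1, 14, 10), 10))
    # -> 2021-06-01 14:00:00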
Example #4
    def loadDataInObs(
        self,
        my_measure: json,
        obs_meteor: ObsMeteor,
        target_key: str,
        delta_values: json,
        my_values: json,
        trace_flag: bool,
    ):
        """
            loadObservationDatarow

            load Observation dataRow from json measures

            load delta_values from json measures
        """
        obs_j = obs_meteor.data.j

        my_value_avg = my_values.get(target_key + '_a')
        my_value_instant = my_values.get(target_key + '_i')
        my_value_dir = my_values.get(target_key + '_di')
        tmp_duration = my_values.get(target_key + '_du')
        if isFlagged(my_measure['special'],
                     MeasureProcessingBitMask.MeasureIsOmm):
            tmp_duration = 60
        if my_value_avg is None:
            if my_value_instant is None:
                return
            else:
                my_value_avg = my_value_instant

        # get our value to aggregate
        if 'measureType' in my_measure:
            if my_measure['measureType'] == 'inst':
                # for omm measure...
                my_value_aggreg = my_value_instant
            else:
                my_value_aggreg = my_value_avg
        else:
            my_value_aggreg = my_value_instant if my_value_instant is not None else my_value_avg

        if my_value_aggreg is None:
            # no data suitable for us
            return

        tmp_s = my_value_aggreg * tmp_duration

        # get current values from our aggregations
        tmp_s_old = tmp_duration_old = 0
        if obs_j.get(target_key + '_s') is not None:
            tmp_s_old = obs_j[target_key + '_s']
            if tmp_s == tmp_s_old:
                # no change on avg computation
                return
            tmp_duration_old = obs_j.get(target_key + '_duration')
            if tmp_duration_old is None:
                tmp_duration_old = tmp_duration
            if tmp_duration != tmp_duration_old:
                raise Exception('loadDataInObs', 'duration mismatch for ' + target_key + ': in obs: ' + str(tmp_duration_old) + ', in json: ' + str(tmp_duration))
            delta_values[target_key + '_s_old'] = tmp_s_old
            delta_values[target_key + '_duration_old'] = tmp_duration_old

        # save data in dv
        delta_values[target_key + '_s'] = tmp_s
        delta_values[target_key + '_duration'] = tmp_duration

        # save data in obs
        obs_j[target_key] = my_value_aggreg

        if my_value_dir is not None:
            obs_j[target_key + '_dir'] = my_value_dir
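
Unlike Example #2, this variant stores a duration-weighted sum (tmp_s = value * duration) under the _s key, so any aggregation level can recover the mean as s / duration. A small, self-contained illustration of why that weighting matters:

    # Two readings of unequal duration: the weighted sum yields the exact mean,
    # whereas averaging the two values directly would not.
    readings = [(14.0, 10), (16.0, 20)]          # (value, duration in minutes)
    s = sum(value * duration for value, duration in readings)
    total_duration = sum(duration for _, duration in readings)
    print(s / total_duration)                     # -> 15.33... (not 15.0)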
Example #5
    def loadDVDataInAggregation(self,
                                my_measure: json,
                                m_stop_dat: datetime,
                                agg_deca: AggMeteor,
                                m_agg_j: json,
                                delta_values: json,
                                dv_next: json,
                                trace_flag: bool = False,
                                agg_suffix: str = '_sum'):
        """
            loadDVDataInAggregation

            Load one aggregation level with values from delta_values, update dv_next

            parameters:
                my_measure: measure definition
                stop_dat: measure stop_dat
                agg_deca
                m_agg_j: aggregations clause in json file
                delta_values: json for forced values
                dv_next: delta_values for next level
                flag: True=insert, False=delete
        """
        target_key = my_measure['target_key']
        agg_j = agg_deca.data.j
        agg_level = agg_deca.getLevelCode()
        has_data = False

        # load old measure values in case of an update of Observation, and only in agg_hour
        tmp_sum = tmp_sum_old = tmp_duration_old = 0
        if (target_key + '_sum_old') in delta_values:
            tmp_sum_old = delta_values[target_key + '_sum_old']
            tmp_duration_old = delta_values[target_key + '_duration_old']
            has_data = True

        # ------------------------------------------------------------------
        # get our new data
        # 1 from dv[target_key + '_sum']
        # 2 from m_agg_j[target_key + '_sum']
        # last win
        # ------------------------------------------------------------------

        if delta_values.get('duration') is None:
            tmp_duration = getAggDuration(agg_level[0], agg_deca.data.start_dat)
        else:
            tmp_duration = float(delta_values['duration'])

        # get our M_s from our delta_values
        tmp_tmp = self.get_json_value(delta_values, target_key, [agg_suffix], None)
        if tmp_tmp is not None:
            has_data = True
            tmp_sum = float(tmp_tmp)
        if delta_values.get(target_key + '_duration') is not None:
            if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm) and agg_level[0] == 'H':
                tmp_duration = getAggDuration(agg_level[0], agg_deca.data.start_dat)
            else:
                tmp_duration = delta_values[target_key + '_duration']

        synos = [target_key]
        if my_measure.get('syno') is not None:
            for a_syno in my_measure['syno']:
                synos.append(a_syno)

        b_value_found = False
        for a_key in synos:
            if b_value_found:
                continue
            tmp_tmp = self.get_json_value(m_agg_j, a_key, [agg_suffix], True)
            if tmp_tmp is not None:
                has_data = True
                tmp_sum = float(tmp_tmp)
                if (a_key + '_duration') in m_agg_j:
                    tmp_duration = float(m_agg_j[a_key + '_duration'])
                else:
                    tmp_duration = getAggDuration(agg_level[0], agg_deca.data.start_dat)
                if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm):
                    tmp_duration = getAggDuration(agg_level[0], agg_deca.data.start_dat)
                b_value_found = True

        # return if the aggregation should not be sent to upper levels
        if not has_data:
            return

        addJson(agg_j, target_key + agg_suffix, tmp_sum - tmp_sum_old)
        addJson(agg_j, target_key + '_duration', tmp_duration - tmp_duration_old)

        tmp_sum_new = agg_j[target_key + agg_suffix]
        tmp_duration_new = agg_j[target_key + '_duration']

        if tmp_duration_new == 0:
            # no duration, delete all keys
            delKey(agg_j, target_key + agg_suffix)
            delKey(agg_j, target_key + '_duration')
            delta_values[target_key + '_delete_me'] = True
        else:
            # Add omm values in agg_hour
            if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm):
                agg_j[target_key] = tmp_sum_new

        if isFlagged(my_measure['special'], MeasureProcessingBitMask.OnlyAggregateInHour):
            return

        # propagate to next level if no limitation on aggregation level
        dv_next[target_key + agg_suffix] = tmp_sum - tmp_sum_old
        dv_next[target_key + '_duration'] = tmp_duration - tmp_duration_old
        if delta_values.get('duration') is None:
            dv_next['duration'] = tmp_duration
        else:
            dv_next['duration'] = delta_values['duration']
Example #6
    def _loadValuesFromCurrent(
        self,
        my_measure: json,
        json_file_data: json,
        measure_idx: int,
        src_key: str,
        target_key: str,
        exclusion: json,
        my_values: json,
        key_suffix: str,
        stop_dat: datetime,
        trace_flag: bool,
    ):
        """
            Load our values into my_values for passing to loadDataInObs and loadMaxMinInObs
        """
        # b_exclu = True -> load data from exclusion, False -> normal processing
        b_exclu = loadFromExclu(exclusion, src_key)

        my_value_instant = my_value_avg = None
        my_value_dir = None

        measure_type = 3
        if 'measureType' in my_measure:
            if my_measure['measureType'] == 'avg':
                measure_type = 1
            elif my_measure['measureType'] == 'inst':
                measure_type = 2
            elif my_measure['measureType'] != 'both':
                raise Exception('processJsonDataAvg::loadDataInObs', 'invalid measureType: ' + my_measure['measureType'] + ' for ' + src_key)

        if b_exclu is False:
            # load our data from the measure (json)
            data_src = json_file_data['data'][measure_idx].get('current')
        else:
            data_src = exclusion

        all_src_keys = [my_measure['src_key']]
        if my_measure.get('syno') is not None:
            for a_syno in my_measure['syno']:
                all_src_keys.append(a_syno)
        value_found = False
        for a_srckey in all_src_keys:
            if value_found:
                continue
            if data_src is not None:
                if a_srckey in data_src and (measure_type & 2) == 2:
                    my_value_instant = self.get_json_value(data_src, a_srckey, [], True)
                    value_found = True
                if (a_srckey + key_suffix) in data_src and (measure_type & 1) == 1:
                    my_value_avg = self.get_json_value(data_src, a_srckey + key_suffix, [], True)
                    value_found = True
                if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                    if (a_srckey + '_dir') in data_src:
                        my_value_dir = data_src[a_srckey + '_dir']
                    elif (a_srckey + key_suffix + '_dir') in data_src:
                        my_value_dir = data_src[a_srckey + key_suffix + '_dir']

        # init value: instantaneous and avg
        # measure_type         I      1      I      2      I      3
        # measure_avg (ma)     I ma, then mi I     None    I ma, then mi
        # measure_instant (mi) I     None    I mi, then ma I mi, then ma

        if measure_type == 1:
            if my_value_avg is None:
                my_value_avg = my_value_instant
            my_value_instant = None
        if measure_type == 2:
            if my_value_instant is None:
                my_value_instant = my_value_avg
            my_value_avg = None
        if measure_type == 3:
            if my_value_avg is None:
                my_value_avg = my_value_instant
            if my_value_instant is None:
                my_value_instant = my_value_avg

        if my_value_avg is not None:
            my_values[target_key + '_a'] = my_value_avg
        if my_value_instant is not None:
            my_values[target_key + '_i'] = my_value_instant
        if my_value_dir is not None:
            my_values[target_key + '_di'] = my_value_dir
        tmp_duration = int(json_file_data['data'][measure_idx]['current']['duration'])
        if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm):
            tmp_duration = 60
        if tmp_duration != 0:
            my_values[target_key + '_du'] = tmp_duration

        # load max/min from json
        for maxmin_key in ['max', 'min']:
            if my_measure.get(maxmin_key) is True:
                maxmin_suffix = '_' + maxmin_key
                if data_src is not None and data_src.get(src_key + maxmin_suffix) is not None:
                    my_values[target_key + maxmin_suffix] = data_src[src_key + maxmin_suffix]
                    my_values[target_key + maxmin_suffix + '_time'] = data_src[src_key + maxmin_suffix + '_time']
                    if maxmin_key == 'max' and (isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind)):
                        if data_src is not None and data_src.get(src_key + maxmin_suffix + '_dir') is not None:
                            my_values[target_key + maxmin_suffix + '_dir'] = data_src[src_key + maxmin_suffix + '_dir']
                else:
                    if my_value_avg is not None or my_value_instant is not None:
                        my_values[target_key + maxmin_suffix] = my_value_instant if my_value_instant is not None else my_value_avg
                        if data_src is not None and data_src.get(src_key + maxmin_suffix + "_time") is not None:
                            my_values[target_key + maxmin_suffix + '_time'] = data_src[src_key + maxmin_suffix + '_time']
                        else:
                            my_values[target_key + maxmin_suffix + '_time'] = stop_dat
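
measure_type works as a two-bit mask: bit 1 admits averaged readings (the key_suffix keys), bit 2 admits instantaneous ones, and 3 admits both; the comment table above then fills whichever side is missing. The same fallback logic restated as a standalone function (hypothetical helper, identical table):

    # Standalone restatement of the measure_type fallback table
    # (1 = avg only, 2 = inst only, 3 = both); hypothetical helper.
    def apply_fallback(measure_type: int, avg, inst):
        if measure_type == 1:
            return (avg if avg is not None else inst), None
        if measure_type == 2:
            return None, (inst if inst is not None else avg)
        return (avg if avg is not None else inst,
                inst if inst is not None else avg)

    print(apply_fallback(1, None, 7.0))   # -> (7.0, None)
    print(apply_fallback(3, 5.0, None))   # -> (5.0, 5.0)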
Example #7
    def loadDVMaxMinInAggregation(
        self,
        my_measure: json,
        m_stop_date: datetime,
        agg_decas: list,
        m_agg_j: json,
        delta_values: json,
        dv_next: json,
        trace_flag: bool = False,
    ):
        """
            loadDVMaxMinInAggregation

            load max/min into all aggregations
            update dv_next for the next level
        """
        target_key = my_measure['target_key']
        idx_maxmin = 0
        for maxmin_suffix in ['_max', '_min']:
            idx_maxmin += 1
            agg_j = agg_decas[idx_maxmin].data.j

            maxmin_key = maxmin_suffix.split('_')[1]

            if my_measure.get(maxmin_key) is True:
                new_calulated_maxmin = None
                new_calulated_maxmin_dir = None

                if target_key == "wind" and maxmin_key == 'max':
                    target_key = "wind"

                # check if measure was deleted
                if (target_key + '_delete_me') in delta_values:
                    # measure was deleted previously
                    if (target_key + maxmin_suffix) in agg_j:
                        # need to invalidate this value for next level
                        invalid_value = agg_j[target_key + maxmin_suffix]
                        dv_next[target_key + maxmin_suffix + '_invalidate'] = invalid_value
                        dv_next[target_key + '_check' + maxmin_suffix] = agg_j[target_key + maxmin_suffix]
                    delKey(agg_j, target_key + maxmin_suffix)
                    delKey(agg_j, target_key + maxmin_suffix + '_time')
                    continue

                # get the current max/min (the last load wins)
                # - load current value from delta_values
                # - load current max/min from delta_values if given
                # - load from "aggregations" clause in the json data file if given
                if target_key in delta_values:
                    new_calulated_maxmin = my_measure['dataType'](delta_values[target_key])
                    new_calulated_maxmin_time = m_stop_date
                    if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                        if (target_key + maxmin_suffix + '_dir') in delta_values:
                            new_calulated_maxmin_dir = float(delta_values[target_key + maxmin_suffix + '_dir'])

                if (target_key + maxmin_suffix) in delta_values:
                    new_calulated_maxmin = my_measure['dataType'](delta_values[target_key + maxmin_suffix])
                    new_calulated_maxmin_time = delta_values[target_key + maxmin_suffix + '_time']
                    if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                        if (target_key + maxmin_suffix + '_dir') in delta_values:
                            new_calulated_maxmin_dir = float(delta_values[target_key + maxmin_suffix + '_dir'])

                if (target_key + maxmin_suffix) in m_agg_j:
                    new_calulated_maxmin = my_measure['dataType'](m_agg_j[target_key + maxmin_suffix])
                    new_calulated_maxmin_time = m_agg_j[target_key + maxmin_suffix + '_time']
                    if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                        if (target_key + maxmin_suffix + '_dir') in m_agg_j:
                            new_calulated_maxmin_dir = float(m_agg_j[target_key + maxmin_suffix + '_dir'])

                if new_calulated_maxmin is None:
                    # should never occur...
                    continue

                # load current max/min value from our aggregation
                agg_maxmin = None
                if (target_key + maxmin_suffix) in agg_j:
                    agg_maxmin = agg_j[target_key + maxmin_suffix]
                """
                invalidation decision tree for [field]_max

                new_calulated_maxmin  +   agg_maxmin  +  former_maxmin_value +  action
                basic
                    10                +               +           No          +  update
                    10                +       5       +           No          +  update
                    10                +       10      +           No          +  pass
                    10                +       15      +           No          +  pass

                check_maxmin flag
                    10                +       5       +           5           +  update -> auto
                    10                +       10      +           10          +  pass -> auto
                    10                +       15      +           15          +  recompute
                """
                if (target_key + '_check' + maxmin_suffix) in delta_values:
                    if agg_maxmin is None:
                        raise Exception('loadDVMaxMinInAggregation', 'Invalidate and no data in aggregation...')
                    former_maxmin_value = delta_values[target_key + '_check' + maxmin_suffix]
                    if maxmin_suffix == '_max':
                        if agg_maxmin > former_maxmin_value:
                            agg_maxmin = None
                            dv_next[target_key + '_check' + maxmin_suffix] = former_maxmin_value
                            self.add_new_maxmin_fix(target_key, maxmin_key, my_measure['deca' + maxmin_suffix], agg_decas[idx_maxmin], delta_values)
                    else:
                        if agg_maxmin < former_maxmin_value:
                            agg_maxmin = None
                            dv_next[target_key + '_check' + maxmin_suffix] = former_maxmin_value
                            self.add_new_maxmin_fix(target_key, maxmin_key, my_measure['deca' + maxmin_suffix], agg_decas[idx_maxmin], delta_values)

                if agg_maxmin is None:
                    # force the update in agg_deca with our new_calulated_maxmin
                    if maxmin_suffix == '_min':
                        agg_maxmin = new_calulated_maxmin + 1
                    else:
                        agg_maxmin = new_calulated_maxmin - 1

                # if (target_key == "wind"):
                #     if agg_j.__contains__(target_key + maxmin_suffix + '_time'):
                #         print("  wind_max: old_time=> " + str(agg_j[target_key + maxmin_suffix + '_time']) + ", new=> " + str(new_calulated_maxmin_time))
                #         print("  agg: id: " + str(agg_decas[idx_maxmin].data.id) + ', level: ' + agg_decas[idx_maxmin].agg_niveau + ', start_dat: ' + str(agg_decas[idx_maxmin].data.start_dat))
                #     else:
                #         print("  wind_max: old_time=> ** no data **, new=> " + str(new_calulated_maxmin_time))
                #         print("  agg: id: " + str(agg_decas[idx_maxmin].data.id) + ', level: ' + agg_decas[idx_maxmin].agg_niveau + ', start_dat: ' + str(agg_decas[idx_maxmin].data.start_dat))
                # compare the measure data and current maxmin
                b_change_maxmin = False
                if maxmin_suffix == '_max' and agg_maxmin < new_calulated_maxmin:
                    # if (target_key == "wind"):
                    #     print('   *** agg < new_calc')
                    b_change_maxmin = True

                # code to keep the latest _max/_min _time in case of equality:
                # if maxmin_suffix == '_max' and agg_maxmin == new_calulated_maxmin and str(agg_j[target_key + maxmin_suffix + '_time']) < str(new_calulated_maxmin_time):
                #     b_change_maxmin = True
                if maxmin_suffix == '_min' and agg_maxmin > new_calulated_maxmin:
                    b_change_maxmin = True
                if maxmin_suffix == '_min' and agg_maxmin == new_calulated_maxmin and str(agg_j[target_key + maxmin_suffix + '_time']) > str(new_calulated_maxmin_time):
                    b_change_maxmin = True

                if b_change_maxmin:
                    agg_j[target_key + maxmin_suffix] = new_calulated_maxmin
                    dv_next[target_key + maxmin_suffix] = new_calulated_maxmin
                    agg_j[target_key + maxmin_suffix + '_time'] = new_calulated_maxmin_time
                    dv_next[target_key + maxmin_suffix + '_time'] = new_calulated_maxmin_time
                    if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                        if new_calulated_maxmin_dir is not None:
                            agg_j[target_key + maxmin_suffix + '_dir'] = new_calulated_maxmin_dir
                            dv_next[target_key + maxmin_suffix + '_dir'] = new_calulated_maxmin_dir