Example #1
import json
import sys


def shouldNullify(exclusion: json, src_key: str) -> bool:
    """
    shouldNullify
        Check whether the exclusion requires nullifying the measure

    Parameters:
        exclusion: json coming from the exclusion table
        src_key: key to check in the exclusion field
    """
    try:
        if exclusion is not None:
            # nullify when the key is absent, or present and forced to "null"
            if src_key not in exclusion or exclusion[src_key] == "null":
                return True
        return False
    except Exception as e:
        # decorate the exception once with context info, then re-raise
        if "done" not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            e.info = {
                "i": repr(e),
                "n": exception_traceback.tb_frame.f_code.co_name,
                "f": exception_traceback.tb_frame.f_code.co_filename,
                "l": exception_traceback.tb_lineno,
            }
            e.done = True
        raise
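
A minimal usage sketch (hypothetical exclusion data, not from the real exclusion table) covering the three cases the guard distinguishes:

# shouldNullify: absent key or "null" marker => nullify; real override => keep
exclusion = {"rain": "null", "wind": {"factor": 2}}
assert shouldNullify(exclusion, "rain") is True    # key present, forced to "null"
assert shouldNullify(exclusion, "temp") is True    # key absent from the exclusion
assert shouldNullify(exclusion, "wind") is False   # key present with a real override
assert shouldNullify(None, "rain") is False        # no exclusion at all
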
Example #2
def delKey(j: json, key: str):
    """
    delKey
        delete a key from json if it exists

    Parameters:
        j: json data
        key: key to delete if it exists
    """
    try:
        if key in j:
            del j[key]
    except Exception as e:
        # decorate the exception once with context info, then re-raise
        if "done" not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            e.info = {
                "i": repr(e),
                "n": exception_traceback.tb_frame.f_code.co_name,
                "f": exception_traceback.tb_frame.f_code.co_filename,
                "l": exception_traceback.tb_lineno,
            }
            e.done = True
        raise
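
delKey wraps a membership test plus del so callers never see a KeyError; on a plain dict the same contract is what pop with a default provides. A small sketch:

j = {"rain_sum": 4.2}
delKey(j, "rain_sum")    # removes the key
delKey(j, "rain_sum")    # second call is a silent no-op
j.pop("rain_sum", None)  # equivalent one-liner on a plain dict
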
Example #3
def addJson(j: json, key: str, valeur: int):
    """
    addJson

        add valeur to j[key], creating the key at 0 if missing

    Parameters:
        j: json field
        key: key to add
        valeur: value to add
    """
    try:
        if key not in j:
            j[key] = 0
        j[key] += valeur
    except Exception as e:
        # decorate the exception once with context info, then re-raise
        if "done" not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            e.info = {
                "i": repr(e),
                "n": exception_traceback.tb_frame.f_code.co_name,
                "f": exception_traceback.tb_frame.f_code.co_filename,
                "l": exception_traceback.tb_lineno,
            }
            e.done = True
        raise
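
addJson is the accumulator used by the aggregation code below: a missing key starts at 0, an existing key is incremented. A short sketch with hypothetical keys:

agg = {}
addJson(agg, "rain_sum", 2)
addJson(agg, "rain_sum", 3)
addJson(agg, "rain_duration", 60)
print(agg)  # {'rain_sum': 5, 'rain_duration': 60}
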
Example #4
    def loadDataInObs(
        self,
        my_measure: json,
        obs_meteor: ObsMeteor,
        target_key: str,
        delta_values: json,
        trace_flag: bool,
    ):
        if 'xyz' not in my_measure:
            # just to satisfy our parser... will always fail
            raise Exception('loadDataInObs', 'should be in virtual func')
Example #5
    def loadValuesFromCurrent(
        self,
        my_measure: json,
        json_file_data: json,
        measure_idx: int,
        src_key: str,
        target_key: str,
        exclusion: json,
        my_values: json,
        trace_flag: bool,
    ):
        if 'xyz' not in my_measure:
            # just to satisfy our parser... will always fail
            raise Exception('loadValuesFromCurrent', 'should be in virtual func')
Example #6
    def loadMaxMinInObs(
        self,
        my_measure: json,
        obs_meteor: ObsMeteor,
        target_key: str,
        delta_values: json,
        my_values: json,
        trace_flag: bool = False,
    ):
        """
            loadMaxMinInObsInObservation

            load in obs max/min value if present
            update delta_values

            calculus v2
        """
        obs_j = obs_meteor.data.j

        for maxmin_key in ['max', 'min']:
            # is max or min needed for this measure
            maxmin_suffix = '_' + maxmin_key
            if my_measure.get(maxmin_key) is True:
                # propagate the check request
                if my_values.get(target_key + '_check' + maxmin_suffix) is not None:
                    obs_j[target_key + '_check' + maxmin_suffix] = my_values[target_key + '_check' + maxmin_suffix]

                if my_values.get(target_key + maxmin_suffix) is not None:
                    my_maxmin_value = my_values[target_key + maxmin_suffix]
                    my_maxmin_date = my_values[target_key + maxmin_suffix + '_time']
                    obs_j[target_key + maxmin_suffix] = my_maxmin_value
                    obs_j[target_key + maxmin_suffix + '_time'] = my_maxmin_date
                    delta_values[target_key + maxmin_suffix] = my_maxmin_value
                    delta_values[target_key + maxmin_suffix + '_time'] = my_maxmin_date
                    if my_values.get(target_key + maxmin_suffix + '_dir') is not None:
                        my_wind_dir = my_values[target_key + maxmin_suffix + '_dir']
                        obs_j[target_key + maxmin_suffix + '_dir'] = my_wind_dir
                        delta_values[target_key + maxmin_suffix + '_dir'] = my_wind_dir

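
The method depends on a strict key-naming convention: <target>_max, <target>_max_time, and an optional <target>_max_dir for wind. A standalone sketch of that copy step, using hypothetical values instead of a real ObsMeteor:

my_values = {"wind_max": 12.4, "wind_max_time": "2021-06-01T14:10:00", "wind_max_dir": 270}
target_key, maxmin_suffix = "wind", "_max"
obs_j, delta_values = {}, {}
if my_values.get(target_key + maxmin_suffix) is not None:
    for suffix in (maxmin_suffix, maxmin_suffix + '_time', maxmin_suffix + '_dir'):
        if my_values.get(target_key + suffix) is not None:
            obs_j[target_key + suffix] = my_values[target_key + suffix]
            delta_values[target_key + suffix] = my_values[target_key + suffix]
print(sorted(obs_j))  # ['wind_max', 'wind_max_dir', 'wind_max_time']
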
Example #7
    def loadDataInObs(
        self,
        my_measure: json,
        obs_meteor: ObsMeteor,
        target_key: str,
        delta_values: json,
        my_values: json,
        trace_flag: bool,
    ):
        """
            loadObservationDatarow

            load Observation dataRow from json measures

            load delta_values from json mesures
        """
        obs_j = obs_meteor.data.j

        my_value_avg = my_values.get(target_key + '_a')
        my_value_instant = my_values.get(target_key + '_i')
        my_value_dir = my_values.get(target_key + '_di')
        tmp_duration = my_values.get(target_key + '_du')
        if isFlagged(my_measure['special'],
                     MeasureProcessingBitMask.MeasureIsOmm):
            tmp_duration = 60
        if my_value_avg is None:
            if my_value_instant is None:
                return
            else:
                my_value_avg = my_value_instant

        # get our value to aggregate
        my_value_aggreg = None
        if 'measureType' in my_measure:
            if my_measure['measureType'] == 'inst':
                # for omm measure...
                my_value_aggreg = my_value_instant
            else:
                my_value_aggreg = my_value_avg
        elif my_value_instant is not None:
            my_value_aggreg = my_value_instant

        if my_value_aggreg is None:
            # no data suitable for us
            return

        tmp_s = my_value_aggreg * tmp_duration

        # get current values from our aggregations
        tmp_s_old = tmp_duration_old = 0
        if obs_j.get(target_key + '_s') is not None:
            tmp_s_old = obs_j[target_key + '_s']
            if tmp_s == tmp_s_old:
                # no change on avg computation
                return
            tmp_duration_old = obs_j.get(target_key + '_duration')
            if tmp_duration_old is None:
                tmp_duration_old = tmp_duration
            if tmp_duration != tmp_duration_old:
                raise Exception('loadDataInObs', 'duration mismatch for ' +
                                target_key + ': in obs: ' + str(tmp_duration_old) +
                                ', in json: ' + str(tmp_duration))
            delta_values[target_key + '_s_old'] = tmp_s_old
            delta_values[target_key + '_duration_old'] = tmp_duration_old

        # save data in dv
        delta_values[target_key + '_s'] = tmp_s
        delta_values[target_key + '_duration'] = tmp_duration

        # save data in obs
        obs_j[target_key] = my_value_aggreg

        if my_value_dir is not None:
            obs_j[target_key + '_dir'] = my_value_dir
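
The _s/_duration pair keeps a duration-weighted sum, so an average can be rebuilt (or incrementally corrected) at any aggregation level. A worked example with hypothetical numbers:

value, duration = 5.0, 10           # a 10-minute measure at 5.0 units
tmp_s = value * duration            # 50.0, stored under target_key + '_s'
assert tmp_s / duration == 5.0      # the average is recovered as _s / _duration
tmp_s_old = tmp_s                   # kept in delta_values as *_s_old on update
tmp_s_new = 6.0 * duration          # corrected measure
delta = tmp_s_new - tmp_s_old       # 10.0: what gets propagated to upper levels
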
Example #8
    def loadDVDataInAggregation(self,
                                my_measure: json,
                                m_stop_dat: datetime,
                                agg_deca: AggMeteor,
                                m_agg_j: json,
                                delta_values: json,
                                dv_next: json,
                                trace_flag: bool = False,
                                agg_suffix: str = '_sum'):
        """
            loadDVDataInAggregation

            Load one aggretation level with values from delta_values, update dv_next

            parameters:
                my_measure: measure definition
                stop_dat: mesure stop_dat
                agg_deca
                m_agg_j: aggregations clause in json file
                delta_values: json for forced values
                dv_next: delta_values for next level
                flag: True=insert, False=delete
        """
        target_key = my_measure['target_key']
        agg_j = agg_deca.data.j
        agg_level = agg_deca.getLevelCode()
        has_data = False

        # load old measure values in case of an update of Observation, and only in agg_hour
        # tmp_sum starts at 0 so an update that only carries old values
        # propagates as a negative correction below
        tmp_sum = tmp_sum_old = tmp_duration_old = 0
        if (target_key + '_sum_old') in delta_values:
            tmp_sum_old = delta_values[target_key + '_sum_old']
            tmp_duration_old = delta_values[target_key + '_duration_old']
            has_data = True

        # ------------------------------------------------------------------
        # get our new data
        # 1 from dv[target_key + '_sum']
        # 2 from m_agg_j[target_key + '_sum']
        # last win
        # ------------------------------------------------------------------

        if delta_values.get('duration') is None:
            tmp_duration = getAggDuration(agg_level[0],
                                          agg_deca.data.start_dat)
        else:
            tmp_duration = float(delta_values["duration"])

        # get our M_s from our delta_values
        tmp_tmp = self.get_json_value(delta_values, target_key, [agg_suffix],
                                      None)
        if tmp_tmp is not None:
            has_data = True
            tmp_sum = float(tmp_tmp)
        if delta_values.get(target_key + '_duration') is not None:
            if isFlagged(my_measure['special'],
                         MeasureProcessingBitMask.MeasureIsOmm) and agg_level[0] == 'H':
                tmp_duration = getAggDuration(agg_level[0],
                                              agg_deca.data.start_dat)
            else:
                tmp_duration = delta_values[target_key + '_duration']

        synos = [target_key]
        if my_measure.get('syno') is not None:
            synos.extend(my_measure['syno'])

        b_value_found = False
        for a_key in synos:
            if b_value_found:
                break
            tmp_tmp = self.get_json_value(m_agg_j, a_key, [agg_suffix], True)
            if tmp_tmp is not None:
                has_data = True
                tmp_sum = float(tmp_tmp)
                if (a_key + '_duration') in m_agg_j:
                    tmp_duration = float(m_agg_j[a_key + '_duration'])
                else:
                    tmp_duration = getAggDuration(agg_level[0],
                                                  agg_deca.data.start_dat)
                if isFlagged(my_measure['special'],
                             MeasureProcessingBitMask.MeasureIsOmm):
                    tmp_duration = getAggDuration(agg_level[0],
                                                  agg_deca.data.start_dat)
                b_value_found = True

        # return if the aggregation should not be sent to upper levels
        if not has_data:
            return

        addJson(agg_j, target_key + agg_suffix, tmp_sum - tmp_sum_old)
        addJson(agg_j, target_key + '_duration',
                tmp_duration - tmp_duration_old)

        tmp_sum_new = agg_j[target_key + agg_suffix]
        tmp_duration_new = agg_j[target_key + '_duration']

        if tmp_duration_new == 0:
            # no duration, delete all keys
            delKey(agg_j, target_key + agg_suffix)
            delKey(agg_j, target_key + '_duration')
            delta_values[target_key + '_delete_me'] = True
        else:
            # Add omm values in agg_hour
            if isFlagged(my_measure['special'],
                         MeasureProcessingBitMask.MeasureIsOmm):
                agg_j[target_key] = tmp_sum_new

        if isFlagged(my_measure['special'],
                     MeasureProcessingBitMask.OnlyAggregateInHour):
            return

        # propagate to next level if no limitation on aggregation level
        dv_next[target_key + agg_suffix] = tmp_sum - tmp_sum_old
        dv_next[target_key + '_duration'] = tmp_duration - tmp_duration_old
        if delta_values.get('duration') is None:
            dv_next['duration'] = tmp_duration
        else:
            dv_next["duration"] = delta_values['duration']
Example #9
    def _loadValuesFromCurrent(
        self,
        my_measure: json,
        json_file_data: json,
        measure_idx: int,
        src_key: str,
        target_key: str,
        exclusion: json,
        my_values: json,
        key_suffix: str,
        stop_dat: datetime,
        trace_flag: bool,
    ):
        """
            Load ou values in delta_value for value passing to loadDataInObs, and loadMaxMinInObsInObservation
        """
        # b_exclu = True -> load data from exclusion, False -> normal processing
        b_exclu = loadFromExclu(exclusion, src_key)

        my_value_instant = my_value_avg = None
        my_value_dir = None

        measure_type = 3
        if 'measureType' in my_measure:
            if my_measure['measureType'] == 'avg':
                measure_type = 1
            elif my_measure['measureType'] == 'inst':
                measure_type = 2
            elif my_measure['measureType'] != 'both':
                raise Exception('processJsonDataAvg::loadDataInObs',
                                'invalid measureType: ' + my_measure['measureType'] +
                                ' for ' + src_key)

        if b_exclu is False:
            # load our data from the measure (json)
            data_src = json_file_data['data'][measure_idx].get('current')
        else:
            data_src = exclusion

        all_src_keys = [my_measure['src_key']]
        if my_measure.get('syno') is not None:
            all_src_keys.extend(my_measure['syno'])
        value_found = False
        for a_srckey in all_src_keys:
            if value_found:
                break
            if data_src is not None:
                if a_srckey in data_src and (measure_type & 2) == 2:
                    my_value_instant = self.get_json_value(data_src, a_srckey, [], True)
                    value_found = True
                if (a_srckey + key_suffix) in data_src and (measure_type & 1) == 1:
                    my_value_avg = self.get_json_value(data_src, a_srckey + key_suffix, [], True)
                    value_found = True
                if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind):
                    if (a_srckey + '_dir') in data_src:
                        my_value_dir = data_src[a_srckey + '_dir']
                    elif (a_srckey + key_suffix + '_dir') in data_src:
                        my_value_dir = data_src[a_srckey + key_suffix + '_dir']

        # init instantaneous and avg values
        # measure_type         I      1      I      2      I      3
        # measure_avg (ma)     I ma, then mi I     None    I ma, then mi
        # measure_instant (mi) I     None    I mi, then ma I mi, then ma

        if measure_type == 1:
            if my_value_avg is None:
                my_value_avg = my_value_instant
            my_value_instant = None
        if measure_type == 2:
            if my_value_instant is None:
                my_value_instant = my_value_avg
            my_value_avg = None
        if measure_type == 3:
            if my_value_avg is None:
                my_value_avg = my_value_instant
            if my_value_instant is None:
                my_value_instant = my_value_avg

        if my_value_avg is not None:
            my_values[target_key + '_a'] = my_value_avg
        if my_value_instant is not None:
            my_values[target_key + '_i'] = my_value_instant
        if my_value_dir is not None:
            my_values[target_key + '_di'] = my_value_dir
        tmp_duration = int(json_file_data['data'][measure_idx]['current']['duration'])
        if isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsOmm):
            tmp_duration = 60
        if tmp_duration != 0:
            my_values[target_key + '_du'] = tmp_duration

        # load max/min from json
        for maxmin_key in ['max', 'min']:
            if my_measure.get(maxmin_key) is True:
                maxmin_suffix = '_' + maxmin_key
                if data_src is not None and data_src.get(src_key + maxmin_suffix) is not None:
                    my_values[target_key + maxmin_suffix] = data_src[src_key + maxmin_suffix]
                    my_values[target_key + maxmin_suffix + '_time'] = data_src[src_key + maxmin_suffix + '_time']
                    if maxmin_key == 'max' and (isFlagged(my_measure['special'], MeasureProcessingBitMask.MeasureIsWind)):
                        if data_src is not None and data_src.get(src_key + maxmin_suffix + '_dir') is not None:
                            my_values[target_key + maxmin_suffix + '_dir'] = data_src[src_key + maxmin_suffix + '_dir']
                else:
                    if my_value_avg is not None or my_value_instant is not None:
                        my_values[target_key + maxmin_suffix] = my_value_instant if my_value_instant is not None else my_value_avg
                        if data_src is not None and data_src.get(src_key + maxmin_suffix + "_time") is not None:
                            my_values[target_key + maxmin_suffix + '_time'] = data_src[src_key + maxmin_suffix + '_time']
                        else:
                            my_values[target_key + maxmin_suffix + '_time'] = stop_dat
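
measure_type acts as a two-bit mask (1 = avg, 2 = inst, 3 = both) driving both which source keys are read and the fallback table in the comment above. A compact, hypothetical restatement of that table:

def resolve(measure_type: int, avg, inst):
    # bit 1 = avg wanted, bit 2 = instant wanted; each falls back to the other
    if measure_type & 1 and avg is None:
        avg = inst
    if measure_type & 2 and inst is None:
        inst = avg
    if measure_type == 1:
        inst = None  # avg-only measure
    if measure_type == 2:
        avg = None   # instant-only measure
    return avg, inst

assert resolve(1, None, 7.0) == (7.0, None)
assert resolve(2, 7.0, None) == (None, 7.0)
assert resolve(3, None, 7.0) == (7.0, 7.0)
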
Example #10
    def loadDVMaxMinInAggregation(
        self,
        my_measure: json,
        m_stop_date: datetime,
        agg_decas: AggMeteor,
        m_agg_j: json,
        delta_values: json,
        dv_next: json,
        trace_flag: bool = False,
    ):
        """
            loadDVMaxMinInAggregation

            load in all aggregations max/min
            update dv_next for nest level
        """
        target_key = my_measure['target_key']
        idx_maxmin = 0
        for maxmin_suffix in ['_max', '_min']:
            idx_maxmin += 1
            agg_j = agg_decas[idx_maxmin].data.j

            maxmin_key = maxmin_suffix.split('_')[1]

            if my_measure.get(maxmin_key) is True:
                new_calulated_maxmin = None
                new_calulated_maxmin_dir = None

                # check if the measure was deleted
                if (target_key + '_delete_me') in delta_values:
                    # measure was deleted previously
                    if (target_key + maxmin_suffix) in agg_j:
                        # need to invalidate this value for the next level
                        invalid_value = agg_j[target_key + maxmin_suffix]
                        dv_next[target_key + maxmin_suffix + '_invalidate'] = invalid_value
                        dv_next[target_key + '_check' + maxmin_suffix] = agg_j[target_key + maxmin_suffix]
                    delKey(agg_j, target_key + maxmin_suffix)
                    delKey(agg_j, target_key + maxmin_suffix + '_time')
                    continue

                # get the current max/min (the last load wins)
                # - load current value from delta_values
                # - load current max/min from delta_values if given
                # - load from the "aggregations" clause in the json data file if given
                if target_key in delta_values:
                    new_calulated_maxmin = my_measure['dataType'](
                        delta_values[target_key])
                    new_calulated_maxmin_time = m_stop_date
                    if isFlagged(my_measure['special'],
                                 MeasureProcessingBitMask.MeasureIsWind):
                        if (target_key + maxmin_suffix + '_dir') in delta_values:
                            new_calulated_maxmin_dir = float(
                                delta_values[target_key + maxmin_suffix + '_dir'])

                if (target_key + maxmin_suffix) in delta_values:
                    new_calulated_maxmin = my_measure['dataType'](
                        delta_values[target_key + maxmin_suffix])
                    new_calulated_maxmin_time = delta_values[target_key + maxmin_suffix + '_time']
                    if isFlagged(my_measure['special'],
                                 MeasureProcessingBitMask.MeasureIsWind):
                        if (target_key + maxmin_suffix + '_dir') in delta_values:
                            new_calulated_maxmin_dir = float(
                                delta_values[target_key + maxmin_suffix + '_dir'])

                if (target_key + maxmin_suffix) in m_agg_j:
                    new_calulated_maxmin = my_measure['dataType'](
                        m_agg_j[target_key + maxmin_suffix])
                    new_calulated_maxmin_time = m_agg_j[target_key + maxmin_suffix + '_time']
                    if isFlagged(my_measure['special'],
                                 MeasureProcessingBitMask.MeasureIsWind):
                        if (target_key + maxmin_suffix + '_dir') in m_agg_j:
                            new_calulated_maxmin_dir = float(
                                m_agg_j[target_key + maxmin_suffix + '_dir'])

                if new_calulated_maxmin is None:
                    # should never occur...
                    continue

                # load current max/min value from our aggregation
                agg_maxmin = agg_j.get(target_key + maxmin_suffix)
                """
                invalidation decision tree for [field]_max

                new_calulated_maxmin  +   agg_maxmin  +  former_maxmin_value +  action
                basic
                    10                +               +           No          +  update
                    10                +       5       +           No          +  update
                    10                +       10      +           No          +  pass
                    10                +       15      +           No          +  pass

                check_maxmin flag
                    10                +       5       +           5           +  update -> auto
                    10                +       10      +           10          +  pass -> auto
                    10                +       15      +           15          +  recompute
                """
                if (target_key + '_check' + maxmin_suffix) in delta_values:
                    if agg_maxmin is None:
                        raise Exception(
                            'loadDVMaxMinInAggregation',
                            'Invalidate and no data in aggregation...')
                    former_maxmin_value = delta_values[target_key + '_check' +
                                                       maxmin_suffix]
                    if maxmin_suffix == '_max':
                        if agg_maxmin > former_maxmin_value:
                            agg_maxmin = None
                            dv_next[target_key + '_check' +
                                    maxmin_suffix] = former_maxmin_value
                            self.add_new_maxmin_fix(
                                target_key, maxmin_key,
                                my_measure['deca' + maxmin_suffix],
                                agg_decas[idx_maxmin], delta_values)
                    else:
                        if agg_maxmin < former_maxmin_value:
                            agg_maxmin = None
                            dv_next[target_key + '_check' +
                                    maxmin_suffix] = former_maxmin_value
                            self.add_new_maxmin_fix(
                                target_key, maxmin_key,
                                my_measure['deca' + maxmin_suffix],
                                agg_decas[idx_maxmin], delta_values)

                if agg_maxmin is None:
                    # force the update in agg_deca for our new_calulated_maxmin
                    if maxmin_suffix == '_min':
                        agg_maxmin = new_calulated_maxmin + 1
                    else:
                        agg_maxmin = new_calulated_maxmin - 1

                # compare the measure data and current maxmin
                b_change_maxmin = False
                if maxmin_suffix == '_max' and agg_maxmin < new_calulated_maxmin:
                    b_change_maxmin = True

                # disabled: keep the latest _max/_min_time in case of equality
                # if maxmin_suffix == '_max' and agg_maxmin == new_calulated_maxmin and str(agg_j[target_key + maxmin_suffix + '_time']) < str(new_calulated_maxmin_time):
                #     b_change_maxmin = True
                if maxmin_suffix == '_min' and agg_maxmin > new_calulated_maxmin:
                    b_change_maxmin = True
                if maxmin_suffix == '_min' and agg_maxmin == new_calulated_maxmin and str(
                        agg_j[target_key + maxmin_suffix + '_time']) > str(new_calulated_maxmin_time):
                    b_change_maxmin = True

                if b_change_maxmin:
                    agg_j[target_key + maxmin_suffix] = new_calulated_maxmin
                    dv_next[target_key + maxmin_suffix] = new_calulated_maxmin
                    agg_j[target_key + maxmin_suffix + '_time'] = new_calulated_maxmin_time
                    dv_next[target_key + maxmin_suffix + '_time'] = new_calulated_maxmin_time
                    if isFlagged(my_measure['special'],
                                 MeasureProcessingBitMask.MeasureIsWind):
                        if new_calulated_maxmin_dir is not None:
                            agg_j[target_key + maxmin_suffix + '_dir'] = new_calulated_maxmin_dir
                            dv_next[target_key + maxmin_suffix + '_dir'] = new_calulated_maxmin_dir
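
The inline decision table above governs when a stored maximum must be recomputed after an invalidation. A hypothetical helper restating the table rows for the _max case:

def decide_max(new_value, agg_value, former_value=None):
    # row 3 of the table: the stored max equals the invalidated value and
    # the new measure no longer reaches it => full recompute needed
    if former_value is not None and agg_value == former_value and former_value > new_value:
        return "recompute"
    if agg_value is None or agg_value < new_value:
        return "update"
    return "pass"

assert decide_max(10, 5) == "update"
assert decide_max(10, 15) == "pass"
assert decide_max(10, 5, former_value=5) == "update"
assert decide_max(10, 15, former_value=15) == "recompute"
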
Example #11
import datetime
import sys


def _checkJsonOneItem(j: json, pid: int, meteor: str,
                      stop_dat_list: list) -> str:
    try:
        idx = 0
        val_to_add = []
        val_to_add_agg = []
        val_to_add_val = []

        j["poste_id"] = pid

        if j.__contains__("meteor") is False or j["meteor"].__len__() == 0:
            return "missing or invalid code meteor"

        if j["meteor"] != meteor:
            return "different code meteor: " + meteor + "/" + j["meteor"]

        while idx < j["data"].__len__():
            new_val = {}
            new_val_agg = {}
            new_val_val = {}
            a_current = j["data"][idx].get("current")
            tmp_stop_dat = j["data"][idx].get("stop_dat")

            if a_current is None:
                if ("aggregations" not in j["data"][idx]
                        and "validation" not in j["data"][idx]):
                    return ("no current/aggregations/validation key in j.data["
                            + str(idx) + "]")
            else:
                # check current key
                if "duration" not in a_current:
                    return "no duration in j.data[" + str(idx) + "].current"
                measure_duration = a_current["duration"]

                tmp_stop_dat = None
                if "stop_dat" not in j["data"][idx]:
                    if "start_dat" not in j["data"][idx]:
                        return ("no start and stop_dat in j.data[" + str(idx)
                                + "].current")
                    measure_duration = datetime.timedelta(
                        minutes=int(a_current["duration"]))
                    new_val = {
                        "k": "stop_dat",
                        "v": j["data"][idx]["start_dat"] + measure_duration,
                        "idx": idx,
                    }
                    val_to_add.append(new_val)
                    # the computed value lives under "v" in the fixup record
                    tmp_stop_dat = new_val["v"]
                else:
                    tmp_stop_dat = j["data"][idx]["stop_dat"]

                if str(tmp_stop_dat) in stop_dat_list:
                    return "stop_dat: " + str(tmp_stop_dat) + " present twice"
                stop_dat_list.append(str(tmp_stop_dat))

                if j["data"][idx].__contains__("start_dat") is False:
                    measure_duration = datetime.timedelta(
                        minutes=int(a_current["duration"]))
                    new_val = {
                        "k": "start_dat",
                        "v": j["data"][idx]["stop_dat"],
                        "idx": idx,
                    }
                    val_to_add.append(new_val)

                for key in a_current:
                    if str(key).endswith("_max") or str(key).endswith("_min"):
                        # queue fixups instead of mutating a_current while
                        # iterating over it (that would raise RuntimeError)
                        if ((key + "time") in a_current
                                and (key + "_time") not in a_current):
                            new_val = {
                                "k": key + "_time",
                                "v": a_current[key + "time"],
                                "idx": idx,
                                "k2": "current",
                            }
                            val_to_add.append(new_val)
                        elif (key + "_time") not in a_current:
                            new_val = {
                                "k": key + "_time",
                                "v": tmp_stop_dat,
                                "idx": idx,
                                "k2": "current",
                            }
                            val_to_add.append(new_val)
                    if str(key).endswith("_sum"):
                        new_val = {
                            "k": str(key).replace("_sum", "_s"),
                            "v": a_current[key],
                            "idx": idx,
                            "k2": "current",
                        }
                        val_to_add.append(new_val)

                    if (str(key).endswith("_s")
                            and (key[:-2] + "_duration") not in a_current):
                        new_val = {
                            # strip the "_s" suffix to build the _duration key
                            "k": key[:-2] + "_duration",
                            "v": measure_duration,
                            "idx": idx,
                            "k2": "current",
                        }
                        val_to_add.append(new_val)

                # old specification...
                if j["data"][idx]["current"].__contains__("aggregations"):
                    return "aggregations is under the key current, should be at same level"

            if tmp_stop_dat is None:
                return "no stop_dat, and no way to compute it"

            all_aggreg = j["data"][idx].get("aggregations")
            if all_aggreg is not None:
                idx2 = 0
                while idx2 < all_aggreg.__len__():
                    a_aggreg = j["data"][idx]["aggregations"][idx2]
                    if a_aggreg.__contains__("level") is False:
                        return ("no level in data[" + str(idx) +
                                "].aggregations[" + str(idx2) + "]")
                    lvl = a_aggreg["level"]
                    if (lvl != "H" and lvl != "D" and lvl != "M" and lvl != "Y"
                            and lvl != "A"):
                        return (lvl + " is invalid level in data[" + str(idx) +
                                "].aggregations[" + str(idx2) + "]")
                    for key in a_aggreg.__iter__():
                        if str(key).endswith(
                                "_sum") and str(key).endswith("_s") is False:
                            new_val_agg = {
                                "k": str(key).replace("_sum", "_s"),
                                "v": a_aggreg[key],
                                "idx": idx,
                                "idx2": idx2,
                            }
                            val_to_add_agg.append(new_val_agg)

                        if str(key).endswith("_max") or str(key).endswith(
                                "_min"):
                            if all_aggreg.__contains__(key + "_time") is False:
                                new_val_agg = {
                                    "k": key + "_time",
                                    "v": j["data"][idx]["stop_dat"],
                                    "idx": idx,
                                    "idx2": idx2,
                                }
                                val_to_add_agg.append(new_val_agg)
                    idx2 += 1

            all_validations = j["data"][idx].get("validation")
            if all_validations is not None:
                idx2 = 0
                while idx2 < all_validations.__len__():
                    a_aggreg = j["data"][idx]["validation"][idx2]

                    if a_aggreg.__contains__("level") is False:
                        return ("no level in data[" + str(idx) +
                                "].validation[" + str(idx2) + "]")
                    lvl = a_aggreg["level"]
                    if (lvl != "H" and lvl != "D" and lvl != "M" and lvl != "Y"
                            and lvl != "A"):
                        return (lvl + " is invalid level in data[" + str(idx) +
                                "].validation[" + str(idx2) + "]")
                    for key in all_validations[idx2].__iter__():
                        # if str(key) == 'out_temp_omm_min':
                        #     key = 'out_temp_omm_min'
                        if str(key).endswith("time") and str(key).endswith(
                                "_time") is False:
                            new_val_val = {
                                "k": str(key).replace("time", "_time"),
                                "v": all_validations[idx2][key],
                                "idx2": idx2,
                                "idx": idx,
                            }
                            val_to_add_val.append(new_val_val)

                        # fix XX_sum -> XX_s
                        if str(key).endswith(
                                "_sum") and str(key).endswith("_s") is False:
                            new_val_val = {
                                "k": str(key).replace("_sum", "_s"),
                                "v": all_validations[idx2][key],
                                "idx2": idx2,
                                "idx": idx,
                            }
                            val_to_add_val.append(new_val_val)

                        if str(key).endswith("_max") or str(key).endswith(
                                "_min"):
                            if all_validations[idx2].__contains__(
                                    key + "time") is True and all_validations[
                                        idx2].__contains__(key +
                                                           "_time") is False:
                                new_val_val = {
                                    "k": key + '_time',
                                    "v": all_validations[idx2][key],
                                    "idx2": idx2,
                                    "idx": idx,
                                }
                                val_to_add_val.append(new_val_val)

                            if all_validations[idx2].__contains__(
                                    key + "_time") is False:
                                new_val_val = {
                                    "k": key + "_time",
                                    "v": j["data"][idx]["stop_dat"],
                                    "idx2": idx2,
                                    "idx": idx,
                                }
                                val_to_add_val.append(new_val_val)

                    idx2 += 1
            idx += 1

        # add missing key/value
        for my_val in val_to_add:
            my_data = j["data"][my_val["idx"]]
            if my_val.get("k2") is None:
                my_data[my_val["k"]] = my_val["v"]
            else:
                my_data[my_val["k2"]][my_val["k"]] = my_val["v"]

        for my_val in val_to_add_agg:
            my_aggregations = j["data"][my_val["idx"]]["aggregations"]
            my_aggregations[my_val["idx2"]][my_val["k"]] = my_val["v"]

        for my_val in val_to_add_val:
            my_validation = j["data"][my_val["idx"]]["validation"]
            my_validation[my_val["idx2"]][my_val["k"]] = my_val["v"]
        return None
    except Exception as e:
        # decorate the exception once with context info, then re-raise
        if "done" not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            e.info = {
                "i": repr(e),
                "n": exception_traceback.tb_frame.f_code.co_name,
                "f": exception_traceback.tb_frame.f_code.co_filename,
                "l": exception_traceback.tb_lineno,
            }
            e.done = True
        raise
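
_checkJsonOneItem validates one payload and queues fixups (computed stop_dat, _sum duplicated as _s, missing _time stamps) that are applied in one pass at the end. A hypothetical end-to-end call:

j = {
    "meteor": "BBF018",  # hypothetical station code
    "data": [{
        "start_dat": datetime.datetime(2021, 6, 1, 14, 0),
        "current": {"duration": 10, "rain_sum": 2.5},
    }],
}
err = _checkJsonOneItem(j, pid=1, meteor="BBF018", stop_dat_list=[])
assert err is None
print(j["data"][0]["stop_dat"])           # start_dat + 10 minutes
print(j["data"][0]["current"]["rain_s"])  # 2.5, duplicated from rain_sum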