# Example 1
    def loadInObs(self, poste_metier, my_measure: json, json_file_data: json, measure_idx: int, m_agg_j: json, obs_meteor: ObsMeteor, delta_values: json, trace_flag: bool = False):
        """
        Load one measure from json data into the Observation table.

        Loads the measure values and max/min into obs, and fills
        delta_values with the deltas to be added in all aggregations.

        Some methods are implemented here, some in the inherited class.

        Parameters:
            poste_metier: station business object, provides exclusion rules
            my_measure: measure definition (src_key, target_key, type_i, special)
            json_file_data: json payload for the current file
            measure_idx: index of the measure in the json data
            m_agg_j: aggregation json for this measure
            obs_meteor: the Observation row being filled
            delta_values: in/out dict of deltas for the aggregation tables
            trace_flag: enable tracing
        """
        # keys of the measure: in the json source, and its column in obs
        src_key = my_measure['src_key']
        target_key = my_measure['target_key']

        # get exclusion, and return if value is nullified
        exclusion = poste_metier.exclusion(my_measure['type_i'])
        # to check later...
        # if shouldNullify(exclusion, src_key) is True:
        #     return

        my_values = {}

        self.loadValuesFromCurrent(my_measure, json_file_data, measure_idx, src_key, target_key, exclusion, my_values, obs_meteor.data.stop_dat, trace_flag)

        # nothing loaded and no aggregation data: nothing to do
        if not my_values and not m_agg_j:
            return

        if isFlagged(my_measure['special'], MeasureProcessingBitMask.NotAllowedInCurrent):
            return

        # load Json data in dv

        # update duration & agg_start_dat in obs if needed
        if len(my_values) > 1:
            tmp_duration = delta_values.get(target_key + '_du')
            if obs_meteor.data.duration == 0:
                obs_meteor.data.duration = tmp_duration
                # compute our agg_h.start_dat for faster retrieval of observation for a given agg_h.start_dat
                obs_meteor.data.agg_start_dat = calcAggDate('H', obs_meteor.data.stop_dat, tmp_duration, True)
            elif obs_meteor.data.duration != tmp_duration:
                # double check that the durations are compatible: the duration
                # already stored in obs must match the one coming from the json
                raise Exception('loadObsDatarow', 'incompatible durations -> in table obs: ' + str(obs_meteor.data.duration) + ', in json: ' + str(tmp_duration))

        # load data from dv to obs
        self.loadDataInObs(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)

        # check maxmin that need to be regenerated later
        self.checkMaxMinToRegenerate(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)

        # load Max/Min in obs, and in dv
        self.loadMaxMinInObs(my_measure, obs_meteor, target_key, delta_values, my_values, trace_flag)
# Example 2
def checkJson(j_arr: json, meteor: str = "???", filename: str = "???") -> str:
    """
    Check Json integrity.

    Parameters:
        j_arr: json data (array of items; the first item must carry a
               non-empty "meteor" station code)
        meteor: kept for interface compatibility (unused here)
        filename: kept for interface compatibility (unused here)

    Returns:
        An error message string, or None when the json is valid.

    Raises:
        Re-raises any unexpected exception after decorating it with its
        origin (file, function, line) in ``e.info``.
    """
    try:
        # the first item must identify the station
        if "meteor" not in j_arr[0] or len(j_arr[0]["meteor"]) == 0:
            return "missing or invalid code meteor"

        pid = PosteMeteor.getPosteIdByMeteor(j_arr[0]["meteor"])
        if pid is None:
            return "code meteor inconnu: " + j_arr[0]["meteor"]

        # validate every item; stop_dat_list accumulates seen timestamps
        # so duplicates across items can be detected by _checkJsonOneItem
        stop_dat_list = []
        for idx, j in enumerate(j_arr):
            ret = _checkJsonOneItem(j, pid, j_arr[0]["meteor"], stop_dat_list)
            if ret is not None:
                return "Error in item " + str(idx) + ": " + ret
    except Exception as e:
        # decorate the exception with its origin exactly once ("done" guard),
        # then re-raise with the original traceback
        if len(e.__dict__) == 0 or "done" not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            # local names (not the `filename` parameter): where it was raised
            err_filename = exception_traceback.tb_frame.f_code.co_filename
            funcname = exception_traceback.tb_frame.f_code.co_name
            line_number = exception_traceback.tb_lineno
            e.info = {
                "i": str(repr(e)),
                "n": funcname,
                "f": err_filename,
                "l": line_number,
            }
            e.done = True
        raise
# Example 3
    def _loadJsonArrayInObs(
        self,
        json_data_array: json,
        trace_flag: bool = False,
        is_tmp: bool = None,
        use_validation: bool = False,
        filename: str = "????",
    ) -> json:
        """
        Load a json array into the obs & agg tables (calculus v2).

        Parameters:
            json_data_array: array of json items, each holding a "data" list
            trace_flag: enable tracing spans
            is_tmp: passed through to the item loader
            use_validation: passed through to the item loader
            filename: origin of the data, for tracing/error messages

        Returns:
            A json array holding timing statistics for the whole load.

        Raises:
            Exception when checkJson rejects the payload; any loader
            exception is recorded on the span then re-raised.
        """
        debut_full_process = datetime.datetime.now()
        ret_data = []
        all_item_processed = 0
        meteor = "???"

        with self.tracer.start_as_current_span("Load Obs",
                                               trace_flag) as my_span:
            try:
                # validate our json before loading anything
                meteor = str(json_data_array[0].get("meteor"))
                check_result = checkJson(json_data_array)
                if check_result is not None:
                    # fixed: "filenme" typo and missing separator before the detail
                    raise Exception("Meteor: " + meteor + ", filename: " +
                                    filename + ", " + str(check_result))

                for idx, json_file_data in enumerate(json_data_array):
                    if idx == 0:
                        my_span.set_attribute("meteor", meteor)
                        my_span.set_attribute("filename", filename)

                    all_item_processed += len(json_file_data["data"])

                    self._loadJsonItemInObs(json_file_data, trace_flag,
                                            is_tmp, use_validation,
                                            filename)

                global_duration = datetime.datetime.now() - debut_full_process
                dur_millisec = round(global_duration.total_seconds() * 1000)
                # guard against empty "data" lists (avoid ZeroDivisionError)
                one_exec = (round(dur_millisec / all_item_processed)
                            if all_item_processed else 0)
                ret_data.append({
                    "total_exec": dur_millisec,
                    "item_processed": all_item_processed,
                    "one_exec": one_exec,
                })
                t.logInfo(
                    "Json file loaded",
                    my_span,
                    {
                        "filename": filename,
                        "timeExec": dur_millisec,
                        "avgItemExec": one_exec,
                        "items": all_item_processed,
                        "meteor": meteor
                    },
                )

            except Exception as exc:
                my_span.record_exception(exc)
                raise

            finally:
                self.tracer.end_span()

        return ret_data