Example #1
    def Stop(self):
        if self.IsRunning() is False:
            return
        for a_worker in WorkerRoot.wrks:
            if a_worker['name'] == self.name:
                try:
                    if a_worker['run'] is False:
                        raise Exception('workers::Stop', self.display + ' already stopped')

                    # Stop this worker first
                    self.killFlag = True
                    self.eventRunMe.set()
                    t.logInfo('Stop command received', None, {"svc": self.display, "status": "stopped"})

                    # Notify service handler
                    try:
                        a_worker['fct']({"param": {"StopMe": True}})
                    except Exception as exc:
                        with self.tracer.start_as_current_span(self.display, True) as my_span:
                            my_span.record_exception(exc)
                    #  Bye
                    return

                except Exception as exc:
                    t.logError('Stop ' + self.display + ": Exception", None, {"exception": str(exc)})
                    raise

                finally:
                    with WorkerRoot.wrks_lock:
                        a_worker['run'] = False
Example #2
    def Start(self):
        if self.IsRunning() is True:
            return
        # hold the registry lock while scanning and mutating the worker list
        with WorkerRoot.wrks_lock:
            for a_worker in WorkerRoot.wrks:
            for a_worker in WorkerRoot.wrks:
                if a_worker['name'] == self.name:
                    try:
                        if a_worker['run'] is True:
                            # just return if the task already running
                            return
                        self.killFlag = False
                        thread = threading.Thread(target=self.__runSvc,
                                                  args=(a_worker,),
                                                  name=self.name,
                                                  daemon=True)
                        if self.GetTraceFlag() is True:
                            t.logInfo('svc thread started', None, {"svc": self.display, "status": "started"})
                        thread.start()
                        a_worker['run'] = True
                        # give the thread a moment to start, then force an initial run
                        time.sleep(1)
                        self.queueRun.put({})
                        self.eventRunMe.set()
                        return

                    except Exception as exc:
                        a_worker['run'] = False
                        t.logError('Start ' + self.display + ": Exception", None, {"exception": str(exc)})
                        raise

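
A minimal usage sketch for the Start/Stop pair, assuming a hypothetical MyService subclass of WorkerRoot that implements __runSvc and registers itself in WorkerRoot.wrks at construction time:

# hypothetical driver code; MyService and its constructor are assumptions
svc = MyService(name='aggreg', display='Aggregation service')

svc.Start()             # spawns the daemon thread and queues a first run
svc.SetTraceFlag(True)  # verbose logging for this worker only
# ... later ...
svc.Stop()              # sets killFlag, wakes the thread, flips run to False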
Example #3
    def handle(self, *args, **options):
        if options['filename']:
            t.logInfo('loadJson started: ' + str(options['filename']))
        else:
            t.logInfo('loadJson started: all files !!!')

        delete_flag = bool(options['delete'])
        trace_flag = bool(options['trace'])
        is_tmp = bool(options['tmp'])
        # set the delete_flag if not given
        if options['nodel']:
            delete_flag = False

        no_aggreg = bool(options['noaggreg'])
        validation_flag = bool(options['validation'])

        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        b_file_found = False
        local_dir = './data/json_not_in_git'
        if hasattr(settings, "LOCAL_DIR") is True:
            local_dir = settings.LOCAL_DIR
        for a_file in glob.glob(base_dir + '/../.' + local_dir + '/*.json'):
            if options['filename'] == '*' or a_file.endswith(options['filename']):
                b_file_found = True
                self.callRemoteObsSvc(a_file, delete_flag, trace_flag, is_tmp,
                                      validation_flag)

        if b_file_found is False:
            self.stderr.write('no file found, exiting')
            return

        # compute aggregations if needed
        if no_aggreg is False:
            if is_tmp is True:
                call_command('svc', 'aggreg', '--run', '--tmp')
            else:
                call_command('svc', 'aggreg', '--run')
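
A hedged invocation sketch: assuming this command is registered as loadJson and its add_arguments declares the option names read above, it can be driven from code or tests like so (the command name and option defaults are assumptions):

from django.core.management import call_command

# hypothetical calls; the option dests mirror the keys read in handle()
call_command('loadJson', filename='poste_42.json', delete=True, trace=False,
             tmp=False, nodel=False, noaggreg=False, validation=False)

# load every matching file and skip the aggregation pass
call_command('loadJson', filename='*', noaggreg=True)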
Example #4
    def SetTraceFlag(self, trace_flag: bool):
        for a_trc in WorkerRoot.trace_flag:
            if a_trc['s'] == self.name:
                a_trc['trace_flag'] = trace_flag
                if trace_flag:
                    t.logInfo(
                        'task ' + self.display + ' setTraceFlag',
                        None,
                        {'traceFlag': trace_flag}
                    )
                return
        raise Exception('workerRoot::SetTraceFlag', 'service ' + self.display + ' not found')
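
For context, SetTraceFlag assumes a class-level registry shaped roughly like the sketch below ('s' holding the worker name); the real initialisation site is not part of this excerpt, so the entries are illustrative:

# assumed shape of WorkerRoot.trace_flag, one entry per registered worker
WorkerRoot.trace_flag = [
    {'s': 'aggreg', 'trace_flag': False},
    {'s': 'loadJson', 'trace_flag': True},
]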
Example #5
    def __processTodo(self, a_todo, is_tmp: bool = False):
        """ Aggregate data from an agg_todo """

        time_start = datetime.datetime.now()
        all_instr = AllTypeInstruments()
        trace_flag = RefManager.GetInstance().GetRef("calcAggreg_trace_flag")
        if trace_flag is None:
            trace_flag = False

        span_name = "Calc agg"
        if is_tmp is True:
            span_name += "_tmp"

        with self.tracer.start_as_current_span(span_name, trace_flag) as my_span:
            my_span.set_attribute("obsId", a_todo.data.obs_id)
            my_span.set_attribute("stopDat", str(a_todo.data.obs.stop_dat))
            my_span.set_attribute("meteor", a_todo.data.obs.poste.meteor)
            my_span.set_attribute("isTmp", is_tmp)

            # retrieve posteMetier and the needed aggregations
            span_load_data = self.tracer.start_span("cache needed aggregations",
                                                    trace_flag)
            m_stop_dat = a_todo.data.obs.stop_dat
            a_start_dat = a_todo.data.obs.agg_start_dat
            poste_metier = PosteMetier(a_todo.data.obs.poste_id, a_start_dat)
            poste_metier.lock()
            aggregations = poste_metier.aggregations(a_todo.data.obs_id,
                                                     m_stop_dat, True, is_tmp)
            span_load_data.set_attribute("items", len(aggregations))
            span_load_data.end()

            try:
                idx_delta_value = -1

                for delta_values in a_todo.data.j_dv:
                    idx_delta_value += 1

                    for an_agg in aggregations:
                        # add duration in main aggregations (the one with no deca...)
                        an_agg.add_duration(delta_values["duration"])

                    for an_agg_level in getAggLevels(is_tmp):
                        with self.tracer.start_span("level " + an_agg_level,
                                                    trace_flag) as span_lvl:
                            b_insert_start_dat = True
                            if idx_delta_value > 0:
                                span_lvl.set_attribute("deltaValueIdx",
                                                       idx_delta_value)
                            # adjust start date, depending on the aggregation level

                            # dv_next is the delta_values for the next level
                            dv_next = {"maxminFix": []}

                            # for all type_instruments
                            for an_instrument in all_instr.get_all_instruments():
                                # for all measures
                                for my_measure in an_instrument["object"].get_all_measures():
                                    # load the needed aggregations for this measure
                                    agg_decas = self.load_aggregations_in_array(
                                        my_measure, an_agg_level, aggregations,
                                        m_stop_dat)

                                    m_agg_j = self.get_agg_magg(
                                        an_agg_level, a_todo.data.obs.j_agg)

                                    if b_insert_start_dat:
                                        b_insert_start_dat = False
                                        span_lvl.set_attribute(
                                            "startDat",
                                            str(agg_decas[0].data.start_dat))

                                    # find the calculus object for my_measure
                                    for a_calculus in self.all_calculus:
                                        if a_calculus["agg"] == my_measure["agg"]:
                                            if a_calculus["calc_agg"] is not None:
                                                # load data in our aggregation
                                                a_calculus["calc_agg"].loadDVDataInAggregation(
                                                    my_measure, m_stop_dat,
                                                    agg_decas[0], m_agg_j,
                                                    delta_values, dv_next,
                                                    trace_flag)

                                                # get our extreme values
                                                a_calculus["calc_agg"].loadDVMaxMinInAggregation(
                                                    my_measure, m_stop_dat,
                                                    agg_decas, m_agg_j,
                                                    delta_values, dv_next,
                                                    trace_flag)
                                            break

                        # feed this level's corrections into the next level
                        delta_values = dv_next

                # save our aggregations for this delta_values
                with self.tracer.start_span("saveData", trace_flag):
                    for an_agg in aggregations:
                        an_agg.save()

                    # a_todo.data.status = 999
                    # a_todo.save()
                    a_todo.delete()

                    # we're done
                    duration = datetime.datetime.now() - time_start
                    dur_millisec = round(duration.total_seconds() * 1000)
                    t.logInfo(
                        "Aggregation computed",
                        my_span,
                        {
                            "obsId": a_todo.data.obs_id,
                            "meteor": a_todo.data.obs.poste.meteor,
                            "queueLength": AggTodoMeteor.count(),
                            "timeExec": dur_millisec,
                        },
                    )
            finally:
                poste_metier.unlock()
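
The rebinding of delta_values to dv_next at the end of each level is the heart of the cascade: the max/min corrections produced while aggregating one level become the input of the next. A standalone sketch of that pattern (made-up level names, not the real getAggLevels output):

def cascade(initial_dv, levels=('hour', 'day', 'month', 'year')):
    # minimal sketch of the per-level cascade used by __processTodo
    delta_values = initial_dv
    for level in levels:
        dv_next = {"maxminFix": []}
        # each calculus object reads delta_values and appends to dv_next
        # the fixes the next level will have to apply
        dv_next["maxminFix"].extend(delta_values.get("maxminFix", []))
        delta_values = dv_next  # next level consumes what this one produced
    return delta_values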
Example #6
    def _getJsonData(self, params: json, isError: IsErrorClass):
        """
            yield filename, file_content
        """
        try:
            # content loaded on client side
            if params.get("json") is not None:
                # load our json data
                my_json = params["json"]
                filename = "???"
                if params.get("filename") is not None:
                    filename = params["filename"]
                yield {"f": filename, "j": my_json}
                return

            # content to load from the server
            use_recursivity = False

            if params.get("base_dir") is None:
                use_recursivity = True
                if hasattr(settings, "AUTOLOAD_DIR") is True:
                    params["base_dir"] = settings.AUTOLOAD_DIR
                else:
                    params["base_dir"] = (os.path.dirname(
                        os.path.dirname(os.path.abspath(__file__))) +
                                          "/../../data/json_auto_load")
            base_dir = params["base_dir"]
            files = []
            if params.get("filename") is not None:
                files.append({"p": base_dir, "f": params["filename"]})
            else:
                if use_recursivity is False:
                    for filename in os.listdir(base_dir):
                        if str(filename).endswith('.json'):
                            files.append({"p": base_dir, "f": filename})
                else:
                    for (dirpath, dirnames, filenames) in os.walk(base_dir):
                        for filename in filenames:
                            # skip files already archived in done/ or failed/
                            if (str(filename).endswith('.json')
                                    and str(dirpath).endswith('/done') is False
                                    and str(dirpath).endswith('/failed') is False):
                                files.append({"p": dirpath, "f": filename})

            files = sorted(files, key=lambda k: k['f'], reverse=False)
            for aFileSpec in files:
                if self.stopRequested is True:
                    continue
                if aFileSpec["f"].endswith(".json"):
                    try:
                        # load our json file
                        with open(aFileSpec["p"] + '/' + aFileSpec["f"],
                                  "r") as f:
                            texte = f.read()
                        my_json = JsonPlus().loads(texte)

                        # load_span.set_attribute("filename", aFile)
                        yield {"f": aFileSpec["f"], "j": my_json}
                        if isError.get() is False:
                            # load_span.add_event("file.moved to [dest]", {"dest": base_dir + "/done/" + aFile})
                            if not os.path.exists(aFileSpec["p"] + '/done'):
                                os.makedirs(aFileSpec["p"] + '/done')
                            os.rename(
                                aFileSpec["p"] + "/" + aFileSpec["f"],
                                aFileSpec["p"] + "/done/" + aFileSpec["f"])
                            # t.logInfo(
                            #     "json file loaded",
                            #     load_span,
                            #     {"filename": aFile, "dest": baseDir + "/done/" + aFile},
                            # )
                        else:
                            if not os.path.exists(aFileSpec["p"] + '/failed'):
                                os.makedirs(aFileSpec["p"] + '/failed')
                            os.rename(
                                aFileSpec["p"] + "/" + aFileSpec["f"],
                                aFileSpec["p"] + "/failed/" + aFileSpec["f"])
                            t.logInfo(
                                "file moved to failed directory",
                                None,
                                {
                                    "filename": aFileSpec["f"],
                                    "dest": aFileSpec["p"] + "/failed/" +
                                            aFileSpec["f"],
                                },
                            )

                    except Exception as exc:
                        t.LogCritical(exc)
                        raise

        except Exception as exc:
            t.LogCritical(exc)
            raise
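
A usage sketch for the generator: the caller consumes one parsed file at a time and flags failures through the shared IsErrorClass instance, which the generator consults after each yield to choose between the done/ and failed/ directories. The set() call and the processing method wired in are assumptions:

is_error = IsErrorClass()
for item in self._getJsonData({"base_dir": "/srv/autoload"}, is_error):
    try:
        self._loadJsonArrayInObs(item["j"], filename=item["f"])
    except Exception:
        is_error.set(True)  # hypothetical setter; routes the file to failed/
    # NB: a real caller would reset the flag between files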
Example #7
    def _loadJsonItemInObs(
        self,
        json_file_data: json,
        trace_flag: bool = False,
        is_tmp: bool = False,
        use_validation: bool = False,
        filename: str = '???',
    ) -> None:
        """
        processJson

        calculus v2: load the json into the obs & agg_todo tables
        """
        all_instr = AllTypeInstruments()

        measure_idx = 0
        # debut_process = datetime.datetime.now()
        my_span = self.tracer.get_current_or_new_span("Load Obs", trace_flag)

        while measure_idx < len(json_file_data["data"]):
            # we use the stop_dat of our measure json as the start date for our processing
            m_stop_date_agg_start_date = json_file_data["data"][measure_idx]["stop_dat"]

            if (json_file_data["data"][measure_idx].get("current") is not None
                    and use_validation is False):
                m_duration = json_file_data["data"][measure_idx]["current"][
                    "duration"]
            else:
                # we don't care as we don't have current data
                m_duration = 0
                # in validation mode, we don't use the 'current'
                json_file_data["data"][measure_idx]["current"] = {
                    "duration": 0
                }
            poste_metier = PosteMetier(json_file_data["poste_id"],
                                       m_stop_date_agg_start_date)
            if measure_idx == 0:
                my_span.set_attribute("posteId", poste_metier.data.id)
                my_span.set_attribute("stopDat",
                                      str(m_stop_date_agg_start_date))
                my_span.set_attribute("isTmp", is_tmp)
            try:
                poste_metier.lock()
                obs_meteor = poste_metier.observation(
                    m_stop_date_agg_start_date, is_tmp)
                if (obs_meteor.data.id is not None
                        and "update_me" not in json_file_data["data"][measure_idx]):
                    t.logInfo(
                        "file: " + filename + " skipping data[" +
                        str(measure_idx) + "], stop_dat: " +
                        str(m_stop_date_agg_start_date) + " already loaded",
                        my_span,
                    )
                    continue
                # load aggregations data in obs_meteor.data.j_agg
                m_agg_j = []
                if use_validation is True:
                    if json_file_data["data"][measure_idx].get("validation") is not None:
                        m_agg_j = json_file_data["data"][measure_idx]["validation"]
                    if len(m_agg_j) == 0:
                        t.logInfo(
                            "skipping data[" + str(measure_idx) +
                            "], no data in JSON !!! stop_dat: " +
                            str(m_stop_date_agg_start_date), my_span)
                        continue
                else:
                    if "aggregations" in json_file_data["data"][measure_idx]:
                        m_agg_j = json_file_data["data"][measure_idx]["aggregations"]
                obs_meteor.data.j_agg = m_agg_j

                # load duration and stop_dat if not already loaded
                if (obs_meteor.data.duration == 0
                        and json_file_data["data"][measure_idx].get("current") is not None):
                    obs_meteor.data.duration = json_file_data["data"][measure_idx]["current"]["duration"]

                delta_values = {"maxminFix": [], "duration": m_duration}

                # for all type_instruments
                for an_instrument in all_instr.get_all_instruments():
                    # for all measures
                    for my_measure in an_instrument["object"].get_all_measures():
                        # find the calculus object for my_measure
                        for a_calculus in self.all_calculus:
                            if a_calculus["agg"] == my_measure["agg"]:
                                if a_calculus["calc_obs"] is not None:
                                    # load our json in obs row
                                    a_calculus["calc_obs"].loadInObs(
                                        poste_metier,
                                        my_measure,
                                        json_file_data,
                                        measure_idx,
                                        m_agg_j,
                                        obs_meteor,
                                        delta_values,
                                        trace_flag,
                                    )
                                break

                # save our new data
                obs_meteor.save()
                my_span.set_attribute("obsId_" + str(measure_idx),
                                      obs_meteor.data.id)

                a_todo = AggTodoMeteor(obs_meteor.data.id, is_tmp)
                a_todo.data.j_dv.append(delta_values)
                # a single-measure payload gets priority 0
                if len(json_file_data["data"]) <= 1:
                    a_todo.data.priority = 0
                if obs_meteor.data.id is not None:
                    a_todo.save()

                # j_trace = {}

                # if trace_flag:
                #     duration2 = datetime.datetime.now() - debut_process
                #     j_trace["info"] = "idx=" + str(measure_idx)
                #     j_trace["total_exec"] = int(duration2.total_seconds() * 1000)
                #     j_trace["item_processed"] = json_file_data["data"].__len__()
                #     j_trace["one_exec"] = int(duration2.total_seconds() * 1000 / json_file_data["data"].__len__())
                #     # j_trace['start_dat'] = json_file_data['data'][measure_idx]['current']['start_dat']
                #     j_trace["stop_dat"] = str(json_file_data["data"][measure_idx]["stop_dat"])
                #     j_trace["obs data"] = JsonPlus().loads(JsonPlus().dumps(obs_meteor.data.j))
                #     j_trace["obs aggregations"] = JsonPlus().loads(JsonPlus().dumps(obs_meteor.data.j_agg))
                #     j_trace["agg_todo dv"] = (JsonPlus().loads(JsonPlus().dumps(a_todo.data.j_dv)) if not (a_todo.data.id is None) else "{}")
                # t.LogDebug("json item loaded", my_span, {'idx': measure_idx, 'time_exec': j_trace["total_exec"], 'items': j_trace["item_processed"], "stop_dat": j_trace["stop_dat"]})

            finally:
                measure_idx += 1
                poste_metier.unlock()

        return
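
Reconstructed from the keys the loader reads, one json_file_data item looks roughly like the sketch below; values are illustrative and real files may carry more fields per measure:

json_file_data = {
    "meteor": "my_station",            # read by _loadJsonArrayInObs
    "poste_id": 42,
    "data": [
        {
            "stop_dat": "2023-06-01T10:00:00Z",
            "current": {"duration": 60},   # absent in validation mode
            "aggregations": [],            # copied into obs_meteor.data.j_agg
            # "validation": [...],         # used when use_validation is True
            # "update_me": True,           # forces reload of an existing obs
        },
    ],
}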
Example #8
    def _loadJsonArrayInObs(
        self,
        json_data_array: json,
        trace_flag: bool = False,
        is_tmp: bool = False,
        use_validation: bool = False,
        filename: str = "????",
    ) -> json:
        """
        processJson

        calculus v2: load the json into the obs & agg_todo tables
        """
        debut_full_process = datetime.datetime.now()
        ret_data = []
        item_processed = 0
        all_item_processed = 0
        idx = 0
        meteor = "???"

        with self.tracer.start_as_current_span("Load Obs",
                                               trace_flag) as my_span:
            try:
                # validate our json
                meteor = str(json_data_array[0].get("meteor"))
                check_result = checkJson(json_data_array)
                if check_result is not None:
                    raise Exception("Meteor: " + meteor + ", filenme: " +
                                    filename + str(check_result))

                while idx < len(json_data_array):
                    try:
                        # start_time = datetime.datetime.now()
                        json_file_data = json_data_array[idx]
                        if idx == 0:
                            my_span.set_attribute("meteor", meteor)
                            my_span.set_attribute("filename", filename)

                        item_processed = len(json_file_data["data"])
                        all_item_processed += item_processed

                        self._loadJsonItemInObs(json_file_data, trace_flag,
                                                is_tmp, use_validation,
                                                filename)

                        # duration = datetime.datetime.now() - start_time
                        # dur_millisec = round(duration.total_seconds() * 1000)

                        # my_span.set_attribute("items_" + str(idx), item_processed)
                        # my_span.set_attribute("timeExec_" + str(idx), dur_millisec)
                        # ret_data.append(ret)

                    finally:
                        idx += 1

                global_duration = datetime.datetime.now() - debut_full_process
                dur_millisec = round(global_duration.total_seconds() * 1000)
                one_exec = round(dur_millisec / all_item_processed) if all_item_processed else 0
                ret_data.append({
                    "total_exec": dur_millisec,
                    "item_processed": all_item_processed,
                    "one_exec": one_exec,
                })
                t.logInfo(
                    "Json file loaded",
                    my_span,
                    {
                        "filename": filename,
                        "timeExec": dur_millisec,
                        "avgItemExec": one_exec,
                        "items": all_item_processed,
                        "meteor": meteor
                    },
                )

            except Exception as exc:
                my_span.record_exception(exc)
                raise

            finally:
                self.tracer.end_span()

        return ret_data
Example #9
@pytest.fixture(autouse=True)
def enable_db_access_for_all_tests(db):
    # pytest-django pattern: autouse fixture granting DB access to every test
    t.logInfo("scope function with autouse")
Example #10
def test_logInfo():
    log_e = logInfo("my message", None, {"a": 1}, True)
    assert log_e['msg'] == 'my message'
    assert log_e['loc'] == "test_logInfo::17"
    assert log_e['level'] == 'info'
    assert log_e["a"] == 1