def ComputAggregFromSvc(self, data: json):
    """Entry point called from the worker to compute aggregations.

    data["param"] may contain:
        StopMe (bool): when True, flag the worker to stop and return.
        trace_flag (bool): per-call override of the global
            "calcAgg_trace_flag" reference.
        is_tmp (bool): work on temporary tables (default False).

    Returns the result list of _computeAggreg, or None on stop,
    invalid input or error.
    """
    self.stopRequested = False

    # validate input
    if data is None or data.get("param") is None or data["param"] == {}:
        t.LogError("wrong data")
        # bug fix: without this return, data["param"] below raised
        # TypeError/KeyError on invalid input; LoadJsonFromSvc already
        # returns here, this makes both service entry points consistent
        return
    params = data["param"]

    # stop request
    if params.get("StopMe") is True:
        self.stopRequested = True
        return

    # trace flag: global reference first, then per-call override
    if RefManager.GetInstance().GetRef("calcAgg_trace_flag") is None:
        trace_flag = False
    else:
        trace_flag = RefManager.GetInstance().GetRef("calcAgg_trace_flag")
    if params.get("trace_flag") is not None:
        trace_flag = params["trace_flag"]

    # other parameters
    is_tmp = False
    ret = []
    if params.get("is_tmp") is not None:
        is_tmp = params["is_tmp"]

    # run the computation now
    try:
        ret = self._computeAggreg(is_tmp, trace_flag)
        return ret
    except Exception as inst:
        t.LogCritical(inst)
def record_exception(self, exc: Exception):
    """Record *exc* in the current span by logging it as critical."""
    t.LogCritical(exc, self)
def handle(self, *args, **options):
    """Parse command-line options and run analyseAggreg for one poste.

    Exits the process (code 0) after logging if anything raises.
    """
    try:
        # poste id is mandatory
        if not options["pid"]:
            raise Exception("compAgg", "missing poste_id")
        poste_id = int(options["pid"])

        # optional date window, defaulting to a wide-open range
        from_dt = datetime.datetime(1900, 1, 1)
        if options["from"]:
            from_dt = str_to_date(options["from"])
        to_dt = datetime.datetime(2900, 1, 1)
        if options["to"]:
            to_dt = str_to_date(options["to"])

        disp_details = bool(options["details"])

        # the last matching flag wins, "all" having the highest priority
        level = "*"
        for opt, code in (("hour", "H"), ("day", "D"), ("month", "M"),
                          ("year", "Y"), ("all", "A")):
            if options[opt]:
                level = code

        self.analyseAggreg(poste_id, from_dt, to_dt, disp_details, level)
    except Exception as e:
        # decorate the exception with its raise location (only once),
        # then log it, print the message and leave
        if e.__dict__.__len__() == 0 or "done" not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            frame_code = exception_traceback.tb_frame.f_code
            e.info = {
                "i": str(e.__repr__()),
                "f": frame_code.co_filename,
                "n": frame_code.co_name,
                "l": exception_traceback.tb_lineno,
            }
            e.done = True
        errMsg = t.LogCritical(e, None, {}, True)
        print(errMsg)
        exit(0)
def callRemoteObsSvc(self, file_name: str, delete_flag: bool,
                     trace_flag: bool, is_tmp, use_validation: bool = False):
    """Load a JSON observation file and submit it to the remote service.

    Args:
        file_name: path of the JSON file to read and send.
        delete_flag: forwarded as the "delete" service parameter.
        trace_flag: tracing flag forwarded to callService.
        is_tmp: forwarded as the "is_tmp" service parameter.
        use_validation: forwarded as the "validation" service parameter.

    Exits the process (code 0) after logging if anything raises.
    """
    try:
        # idiom fix: read the file in one call instead of the previous
        # quadratic line-by-line string concatenation
        with open(file_name, "r") as f:
            texte = f.read()
        my_json = JsonPlus().loads(texte)
        params = {
            "json": my_json,
            "delete": delete_flag,
            "is_tmp": is_tmp,
            "validation": use_validation
        }
        # bug fix: honor the caller's trace_flag instead of the
        # hard-coded False previously passed to callService
        self.callService('auto', SvcRequestType.Run, trace_flag, params)
    except Exception as e:
        # decorate the exception with its raise location (only once),
        # then log it, print the message and leave
        if e.__dict__.__len__() == 0 or 'done' not in e.__dict__:
            exception_type, exception_object, exception_traceback = sys.exc_info()
            exception_info = e.__repr__()
            filename = exception_traceback.tb_frame.f_code.co_filename
            line_number = exception_traceback.tb_lineno
            funcname = exception_traceback.tb_frame.f_code.co_name
            e.info = {
                "i": str(exception_info),
                "n": funcname,
                "f": filename,
                "l": line_number,
            }
            e.done = True
        errMsg = t.LogCritical(e, None, {}, True)
        print(errMsg)
        exit(0)
def callService(self, service_name: str, command: int, trace_flag: bool, params: json):
    """POST a service request to the local app server, print each result line.

    Exits the process (code 0) after logging if anything raises.
    """
    try:
        payload = JsonPlus().dumps({
            "svc": service_name,
            "cde": command,
            "trace_flag": trace_flag,
            "params": params
        })
        r = requests.post(
            "http://localhost:8000/app/svc",
            data=payload,
            headers={'Content-Type': 'application/json'})
        # if r.status_code
        for a_result in r.json()['result']:
            self.stdout.write(a_result)
    except Exception as e:
        # decorate the exception with its raise location (only once),
        # then log it, print the message and leave
        if e.__dict__.__len__() == 0 or 'done' not in e.__dict__:
            _, _, exception_traceback = sys.exc_info()
            frame_code = exception_traceback.tb_frame.f_code
            e.info = {
                "i": str(e.__repr__()),
                "n": frame_code.co_name,
                "f": frame_code.co_filename,
                "l": exception_traceback.tb_lineno,
            }
            e.done = True
        errMsg = t.LogCritical(e, None, {}, True)
        print(errMsg)
        exit(0)
def LoadJsonFromSvc(self, data: json): """ common params: "json: json_data pre loaded "delete": delete_flag, "is_tmp": is_tmp, "validation": use_validation when called from command line: data["param]: "json": my_json # mandatory ! when called from autoLoad "base_dir": my_json, """ # Load params self.stopRequested = False if data is None or data.get("param") is None or data["param"] == {}: t.LogError("wrong data") return params = data["param"] # stop request if params.get("StopMe") is True: self.stopRequested = True return # load parameters (global trace, then params.trace_flag) if RefManager.GetInstance().GetRef("loadObs_trace_flag") is None: trace_flag = False else: trace_flag = RefManager.GetInstance().GetRef("loadObs_trace_flag") if params.get("trace_flag") is not None: trace_flag = params["trace_flag"] # load other parameters is_tmp = delete_flag = use_validation = False ret = [] if params.get("is_tmp") is not None: is_tmp = params["is_tmp"] if params.get("delete") is not None: delete_flag = params["delete"] if params.get("validation") is not None: use_validation = params["validation"] # delete is not part of the transaction if delete_flag: self.delete_obs_agg(is_tmp) # load the content of files on the server isError = IsErrorClass() for j_data in self._getJsonData(params, isError): try: if CalcObs.lock.acquire(True, 500) is False: t.logWarning("lock time-out !") return try: func_ret = self._loadJsonArrayInObs( j_data["j"], trace_flag, is_tmp, use_validation, j_data.get("f")) finally: CalcObs.lock.release() ret.append(func_ret[0]) except Exception as inst: isError.set(True) t.LogCritical(inst) # activate the computation of aggregations SvcAggreg.runMe({"is_tmp": is_tmp, "trace_flag": trace_flag}) return ret
def _getJsonData(self, params: json, isError: IsErrorClass):
    """
    Yield {"f": filename, "j": json_content} for each JSON payload to load.

    Three sources, in priority order:
      1. params["json"]: content already loaded on the client side
         (with an optional params["filename"] used for reporting).
      2. params["base_dir"]: scan that directory (non-recursive),
         optionally restricted to params["filename"].
      3. default autoload directory (settings.AUTOLOAD_DIR, or the
         bundled data/json_auto_load), scanned recursively while
         skipping the done/ and failed/ sub-directories.

    After each yield, the file is moved to done/ or failed/ depending
    on isError.  A stop request (self.stopRequested) skips the
    remaining files.  Any error is logged and re-raised.
    """
    try:
        # content loaded on client side
        if params.get("json") is not None:
            my_json = params["json"]
            filename = "???"
            if params.get("filename") is not None:
                filename = params["filename"]
            yield {"f": filename, "j": my_json}
            return

        # content to load from the server
        # bug fix: the original read `use_recursivity = False,` — the
        # trailing comma bound the truthy tuple (False,), so the later
        # `use_recursivity is False` test always failed and an
        # explicitly supplied base_dir was walked recursively instead
        # of flat-listed
        use_recursivity = False
        if params.get("base_dir") is None:
            use_recursivity = True
            if hasattr(settings, "AUTOLOAD_DIR") is True:
                params["base_dir"] = settings.AUTOLOAD_DIR
            else:
                params["base_dir"] = (os.path.dirname(
                    os.path.dirname(os.path.abspath(__file__))) +
                    "/../../data/json_auto_load")
        base_dir = params["base_dir"]

        # build the list of candidate files, sorted by filename
        files = []
        if params.get("filename") is not None:
            files.append({"p": base_dir, "f": params["filename"]})
        elif use_recursivity is False:
            for filename in os.listdir(base_dir):
                if str(filename).endswith('.json'):
                    files.append({"p": base_dir, "f": filename})
        else:
            for (dirpath, dirnames, filenames) in os.walk(base_dir):
                # skip files already archived (done/) or rejected (failed/)
                if str(dirpath).endswith('/done') or \
                        str(dirpath).endswith('/failed'):
                    continue
                for filename in filenames:
                    if str(filename).endswith('.json'):
                        files.append({"p": dirpath, "f": filename})
        files = sorted(files, key=lambda k: k['f'], reverse=False)

        for aFileSpec in files:
            if self.stopRequested is True:
                continue
            if aFileSpec["f"].endswith(".json"):
                try:
                    # load our json file in one read
                    with open(aFileSpec["p"] + '/' + aFileSpec["f"],
                              "r") as f:
                        texte = f.read()
                    my_json = JsonPlus().loads(texte)
                    yield {"f": aFileSpec["f"], "j": my_json}
                    if isError.get() is False:
                        # success: archive the file under done/
                        if not os.path.exists(aFileSpec["p"] + '/done'):
                            os.makedirs(aFileSpec["p"] + '/done')
                        os.rename(
                            aFileSpec["p"] + "/" + aFileSpec["f"],
                            aFileSpec["p"] + "/done/" + aFileSpec["f"])
                    else:
                        # failure: park the file under failed/
                        t.logInfo(
                            "file moved to fail directory", None, {
                                "filename": aFileSpec["f"],
                                "dest": aFileSpec["p"] + "/failed/" +
                                aFileSpec["f"]
                            },
                        )
                        if not os.path.exists(aFileSpec["p"] + '/failed'):
                            os.makedirs(aFileSpec["p"] + '/failed')
                        os.rename(
                            aFileSpec["p"] + "/" + aFileSpec["f"],
                            aFileSpec["p"] + "/failed/" + aFileSpec["f"])
                except Exception as exc:
                    t.LogCritical(exc)
                    raise exc
    except Exception as exc:
        t.LogCritical(exc)
        raise exc