def FinalizeTelemetryData(dictTelemetryData):
    """Stamp, optionally mitigate, dump, and export one telemetry record.

    Adds a millisecond collection timestamp to the record, optionally runs
    it through the mitigation library, writes optional example/dump output,
    and finally exports the JSON-encoded metric unless the only-openconfig
    filter rejects it.

    Args:
        dictTelemetryData: telemetry record; must contain a dict at
            dictTelemetryData["collector"]["data"] (mutated in place to
            add "collection_timestamp").

    Returns:
        The JSON string built from the (possibly mitigated) record; it is
        returned even when the only-openconfig filter suppressed export.
    """
    # Epoch in milliseconds identifies this single metric on its way to storage.
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Going over the mitigation library, if needed.
    # TODO: Simplify the next part
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data
        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            jsonTelemetryData = json.dumps(
                dictTelemetryData_mod, indent=2, sort_keys=True)
        except Exception as e:
            # BUG FIX: the original call passed no argument for the "%s"
            # placeholder, so the literal "%s" was logged instead of the
            # exception. Supply it lazily (logging formats only if emitted).
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised a error:\n%s", e)
            # Fall back to the unmodified record.
            dictTelemetryData_mod = dictTelemetryData
            jsonTelemetryData = json.dumps(
                dictTelemetryData, indent=2, sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        jsonTelemetryData = json.dumps(
            dictTelemetryData, indent=2, sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s", jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s",
                         lib_pmgrpcd.OPTIONS.jsondatadumpfile)
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile, "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Filter: with --onlyopenconfig, export only records whose encoding_path
    # mentions "openconfig".
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s",
            lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig"
                    in dictTelemetryData_mod["collector"]["data"]["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData
def FinalizeTelemetryData(dictTelemetryData):
    """Stamp, optionally mitigate, dump, and forward one telemetry record.

    Legacy variant driven by the module-level ``options`` and ``zmqSock``
    globals: adds a millisecond collection timestamp, optionally runs the
    record through the mitigation library, writes optional example/dump
    output, then forwards the JSON via Kafka-AVRO and/or ZMQ — restricted
    to openconfig encoding paths when --onlyopenconfig is set.

    Args:
        dictTelemetryData: telemetry record; must contain a dict at
            dictTelemetryData["collector"]["data"] (mutated in place to
            add "collection_timestamp").

    Returns:
        The JSON string built from the (possibly mitigated) record.
    """
    global options
    global zmqSock

    # Epoch in milliseconds identifies this single metric on its way to storage.
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    if options.mitigation:
        from mitigation import mod_all_json_data
        dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
        jsonTelemetryData = json.dumps(
            dictTelemetryData_mod, indent=2, sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        jsonTelemetryData = json.dumps(
            dictTelemetryData, indent=2, sort_keys=True)

    if options.examplepath and options.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if options.jsondatafile:
        pmgrpcdlog.debug("Write jsondatafile: %s" % (options.jsondatafile))
        with open(options.jsondatafile, 'a') as jsondatafile:
            jsondatafile.write(jsonTelemetryData)
            jsondatafile.write("\n")

    def _forward(json_data):
        """Forward one JSON metric via the enabled transports (AVRO and/or ZMQ).

        Extracted helper: the original duplicated this code verbatim in both
        branches of the --onlyopenconfig filter.
        """
        # Maybe AVRO forwarding is enabled.
        if options.kafkaavro:
            serializelog.debug("kafkaavro is enabled")
            process_metric(json_data)
        # Maybe ZMQ forwarding is enabled.
        if options.zmq:
            if not zmqSock.closed:
                try:
                    zmqSock.send_json("%s" % json_data)
                except ZMQError:
                    # NOTE(review): this logs options.jsondatafile, not the
                    # ZMQ error — looks like a copy/paste mistake; preserved
                    # as-is to keep log output identical. TODO confirm.
                    serializelog.debug("ZMQError: %s" % (options.jsondatafile))

    if options.onlyopenconfig:
        pmgrpcdlog.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % options.onlyopenconfig)
        # Export only records whose encoding_path mentions "openconfig".
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if 'openconfig' in dictTelemetryData_mod["collector"]["data"][
                    "encoding_path"]:
                pmgrpcdlog.debug("Write jsondatafile: %s" % (options.jsondatafile))
                _forward(jsonTelemetryData)
    else:
        _forward(jsonTelemetryData)

    return jsonTelemetryData
def FinalizeTelemetryData(dictTelemetryData):
    """Stamp, optionally mitigate, optionally transform, and export a record.

    Adds a millisecond collection timestamp, optionally runs the record
    through the mitigation library, and — when a TRANSFORMATION is
    configured and the record carries "dataGpbkv" — flattens and transforms
    it into one or more derived metrics that are exported individually.
    Otherwise falls through to the standard example/dump/only-openconfig
    export path.

    Args:
        dictTelemetryData: telemetry record; must contain a dict at
            dictTelemetryData["collector"]["data"] (mutated in place to
            add "collection_timestamp").

    Returns:
        The JSON string built from the (possibly mitigated) record; this is
        returned even when transformed metrics were exported instead.
    """
    # Epoch in milliseconds identifies this single metric on its way to storage.
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Going over the mitigation library, if needed.
    # TODO: Simplify the next part
    dictTelemetryData_beforeencoding = None
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data
        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            dictTelemetryData_beforeencoding = dictTelemetryData_mod
            jsonTelemetryData = json.dumps(
                dictTelemetryData_mod, indent=2, sort_keys=True)
        except Exception as e:
            # BUG FIX: the original call passed no argument for the "%s"
            # placeholder, so the literal "%s" was logged instead of the
            # exception. Supply it lazily (logging formats only if emitted).
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised a error:\n%s", e)
            dictTelemetryData_mod = dictTelemetryData
            dictTelemetryData_beforeencoding = dictTelemetryData
            jsonTelemetryData = json.dumps(
                dictTelemetryData, indent=2, sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        dictTelemetryData_beforeencoding = dictTelemetryData
        jsonTelemetryData = json.dumps(
            dictTelemetryData, indent=2, sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s", jsonTelemetryData)

    # Check if we need to transform. This will change later.
    # (Debug print()/breakpoint() leftovers from the original were removed.)
    if TRANSFORMATION and dictTelemetryData_beforeencoding and "dataGpbkv" in \
            dictTelemetryData_beforeencoding.get("collector", {}).get("data", {}):
        # BUG FIX: the original read ["collector"]["data"]["path"]
        # unconditionally before this guard, raising KeyError for records
        # without a "path" even when no transformation applies. The lookup
        # now only happens inside the guarded branch.
        path = dictTelemetryData_beforeencoding["collector"]["data"]["path"]
        actual_data = dictTelemetryData_beforeencoding.get(path, {})

        data = dictTelemetryData_beforeencoding["collector"]["data"].copy()
        data["dataGpbkv"] = [{"fields": actual_data}]
        # We just transform for kv.
        # NOTE(review): "build_from_dcit" looks like a typo for
        # "build_from_dict", but it is the project API's actual name.
        metric = CiscoKVFlatten.build_from_dcit(data)
        for internal in metric.get_internal():
            for new_metric in TRANSFORMATION.transform(internal):
                new_data = new_metric.data
                new_data["dataGpbkv"] = new_metric.content
                export_metrics(json.dumps({"collector": {"data": new_data}}))
        return jsonTelemetryData

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s",
                         lib_pmgrpcd.OPTIONS.jsondatadumpfile)
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile, "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Filter: with --onlyopenconfig, export only records whose encoding_path
    # mentions "openconfig".
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s",
            lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig"
                    in dictTelemetryData_mod["collector"]["data"]["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData