def export_metrics(datajsonstring):
    """Hand a serialized telemetry JSON string to every configured exporter.

    Args:
        datajsonstring: the telemetry payload, already JSON-encoded.

    Raises:
        Whatever the failing exporter raised; the failure is logged with the
        exporter's name before being re-raised.
    """
    # Iterate name/exporter pairs directly instead of re-indexing EXPORTERS
    # on every loop pass.
    for name, exporter in EXPORTERS.items():
        try:
            exporter.process_metric(datajsonstring)
        except Exception as e:
            # Kept at debug level, matching the original; the exception is
            # propagated so the caller still sees the failure.
            PMGRPCDLOG.debug(
                "Error processing packet on exporter %s. Error was %s", name, e
            )
            raise
def add_option(self, *arg, **kargs):
    """Register an option, optionally sourcing its default from an env variable.

    Extra (non-optparse) keyword arguments:
        env_name: environment variable whose value overrides the default.
        can_be_none: if False (default), a None default raises.

    Raises:
        Exception: when the resolved default is None and can_be_none is False.
    """
    # pop() replaces the original get()/del pairs guarded by bare `except: pass`.
    envvar = kargs.pop("env_name", None)
    can_be_none = kargs.pop("can_be_none", False)

    if envvar is not None:
        # Advertise the env variable in the option's help text.
        new_help = kargs.get("help", "")
        new_help = new_help + " [Env variable {}]".format(envvar)
        kargs["help"] = new_help

        # Modify the default to be the one in the env_name
        if envvar in os.environ:
            PMGRPCDLOG.debug(
                "Getting data from %s from the env variable %s", arg[0], envvar)
            kargs["default"] = os.environ[envvar]

    if not can_be_none and "default" in kargs and kargs["default"] is None:
        # The original passed a printf-style tuple to Exception, which never
        # got formatted; build the message explicitly instead.
        raise Exception("Parameter with env {} is None".format(envvar))

    super().add_option(*arg, **kargs)
def dataPublish(self, message, context):
    """Consume a stream of Huawei dial-out telemetry messages, processing each."""
    peer_info = {}
    raw_peer = context.peer()
    # Example of raw_peer -> 'ipv4:10.215.133.23:57775'
    proto, node, port = raw_peer.split(":")
    peer_info["telemetry_proto"] = proto
    peer_info["telemetry_node"] = node
    peer_info["telemetry_node_port"] = port
    peer_info["ne_vendor"] = "Huawei"
    PMGRPCDLOG.debug("Huawei MdtDialout Message: %s" % peer_info["telemetry_node"])

    metadata = dict(context.invocation_metadata())
    peer_info["user-agent"] = metadata["user-agent"]
    peer_info["grpc_processing"] = "huawei_grpc_dialout_pb2_grpc"
    peer_info["grpc_ulayer"] = "GPB Telemetry"

    peer_json = json.dumps(peer_info, indent=2, sort_keys=True)
    PMGRPCDLOG.debug("Huawei RAW Message: %s" % peer_json)

    for new_msg in message:
        PMGRPCDLOG.debug("Huawei new_msg iteration message")
        # Honour the optional source-IP filter.
        if lib_pmgrpcd.OPTIONS.ip:
            if peer_info["telemetry_node"] != lib_pmgrpcd.OPTIONS.ip:
                continue
            PMGRPCDLOG.debug("Huawei: ip filter matched with ip %s"
                             % (lib_pmgrpcd.OPTIONS.ip))
        try:
            huawei_processing(peer_info, new_msg)
        except Exception as e:
            PMGRPCDLOG.debug("Error processing Huawei packet, error is %s", e)
            continue
    return
    # Unreachable yield keeps this handler a generator, as gRPC expects.
    yield
def examples(dictTelemetryData_mod, jsonTelemetryData):
    """Persist one example JSON payload per (peer, vendor, encoding path) on disk."""
    global example_dict

    collector = dictTelemetryData_mod["collector"]
    if collector["grpc"]["grpcPeer"]:
        grpcPeer = collector["grpc"]["grpcPeer"]
    if collector["grpc"]["ne_vendor"]:
        ne_vendor = collector["grpc"]["ne_vendor"]
    if collector["data"]["encoding_path"]:
        encoding_path = collector["data"]["encoding_path"]

    PMGRPCDLOG.debug(
        "IN EXAMPLES: grpcPeer=%s ne_vendor=%s encoding_path=%s"
        % (grpcPeer, ne_vendor, encoding_path))

    # Best-effort creation of the examples directory; races/permission errors
    # are deliberately ignored, as in the original.
    try:
        if not os.path.exists(lib_pmgrpcd.OPTIONS.examplepath):
            os.makedirs(lib_pmgrpcd.OPTIONS.examplepath)
    except OSError:
        pass

    # Track which encoding paths have been seen per peer.
    known_paths = example_dict.setdefault(grpcPeer, [])
    if encoding_path not in known_paths:
        known_paths.append(encoding_path)

    sanitized_path = encoding_path.replace(":", "_").replace("/", "-")
    example_name = grpcPeer + "_" + ne_vendor + "_" + sanitized_path + ".json"
    example_file = os.path.join(lib_pmgrpcd.OPTIONS.examplepath, example_name)
    # Named the file handle distinctly; the original shadowed the path variable.
    with open(example_file, "w") as handle:
        # handle.write("PROTOPATH[" + telemetry_node + "]: " + protopath + "\n")
        handle.write(jsonTelemetryData)
        handle.write("\n")
def add_option(self, *arg, **kargs):
    """Register an option whose default can come from the config file or an env variable.

    Extra (non-optparse) keyword arguments:
        env_name: environment variable overriding the default (highest priority).
        required: marks the option as mandatory; stored on the returned option.

    Returns:
        The option object created by the parent parser, with ``required`` attached.
    """
    # pop() replaces the original get()/del pairs guarded by bare `except: pass`.
    envvar = kargs.pop("env_name", None)
    required = kargs.pop("required", False)

    # Build the help text with provenance hints.
    new_help = kargs.get("help", "")
    dest = kargs.get("dest", None)
    if dest is not None:
        new_help = new_help + f" [Configkey {dest}]"
    code_default = kargs.get("default", None)
    if code_default is not None:
        new_help = new_help + f" [Default {code_default}]"
    elif required:
        new_help = new_help + " [Mandatory]"

    # Config-file value overrides the code default...
    if dest is not None and dest in self.config:
        # The original log wrongly said "env variable" here; this branch reads
        # the config file.
        PMGRPCDLOG.debug("Getting data for %s from the config key %s",
                         arg[0], dest)
        kargs["default"] = self.config[dest]

    # ...and the environment variable overrides both.
    if envvar is not None:
        new_help = new_help + " [Env {}]".format(envvar)
        if envvar in os.environ:
            PMGRPCDLOG.debug(
                "Getting data from %s from the env variable %s", arg[0], envvar)
            kargs["default"] = os.environ[envvar]
    # Single final assignment; the original set kargs["help"] twice.
    kargs["help"] = new_help

    # store_true options need a real bool default (env/config supply strings).
    action = kargs.get("action", "")
    if action == "store_true":
        if "default" in kargs and not isinstance(kargs["default"], bool):
            kargs["default"] = bool(strtobool(kargs["default"]))
        if "default" not in kargs:
            kargs["default"] = False

    option = super().add_option(*arg, **kargs)
    option.required = required
    return option
def get_gpbmapfile():
    """Lazily load and cache the proto-name -> GPB-constructor map.

    The map file holds lines such as:
        huawei-ifm = huawei_ifm_pb2.Ifm()

    Returns:
        dict mapping proto names to the Python expression (string) that builds
        the matching GPB message object.
    """
    global MAP_DICT
    if MAP_DICT is None:
        with open(lib_pmgrpcd.OPTIONS.gpbmapfile, "r") as file:
            MAP_DICT = {}
            for line in file:
                # Skip blank lines so a trailing newline does not crash the split.
                if not line.strip():
                    continue
                # Split only on the first '=' in case the value contains one.
                # a.e. "huawei-ifm" = 'huawei_ifm_pb2.Ifm()'
                (k, v) = line.split("=", 1)
                MAP_DICT.update({k.strip(): v.strip()})
        PMGRPCDLOG.debug("MAP_DICT: %s", MAP_DICT)
    return MAP_DICT
def select_gbp_methode(proto):
    """Resolve the GPB message object that matches a proto name.

    Args:
        proto: proto name taken from the telemetry sensor path.

    Returns:
        A fresh GPB message instance, or False when no mapping exists (the
        miss is recorded in lib_pmgrpcd.MISSGPBLIB with a timestamp).

    Raises:
        Re-raises any error from loading the map file, after logging it.
    """
    try:
        map_dict = get_gpbmapfile()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit escape.
        PMGRPCDLOG.error("Error getting the map dict")
        raise

    if proto in map_dict:
        PMGRPCDLOG.debug("I FOUND THE GPB (%s) FOR PROTO (%s)"
                         % (proto, map_dict[proto]))
        # TODO: I am pretty sure we can do something better than this.
        # SECURITY: eval() executes arbitrary code from the gpbmap file; the
        # file must be operator-controlled/trusted. A class-registry lookup
        # would be safer.
        msg = eval(map_dict[proto])
        return msg
    else:
        PMGRPCDLOG.debug("MISSING GPB Methode for PROTO: %s", proto)
        lib_pmgrpcd.MISSGPBLIB.update({proto: str(datetime.now())})
        return False
def MdtDialout(self, msg_iterator, context):
    """Consume a stream of Cisco MDT dial-out messages, processing each one."""
    try:
        grpcPeer = {}
        grpcPeerStr = context.peer()
        # Example of grpcPeerStr -> 'ipv4:10.215.133.23:57775'
        (
            grpcPeer["telemetry_proto"],
            grpcPeer["telemetry_node"],
            grpcPeer["telemetry_node_port"],
        ) = grpcPeerStr.split(":")
        grpcPeer["ne_vendor"] = "Cisco"
        PMGRPCDLOG.debug("Cisco MdtDialout Message: %s"
                         % grpcPeer["telemetry_node"])

        metadata = dict(context.invocation_metadata())
        grpcPeer["user-agent"] = metadata["user-agent"]
        grpcPeer["grpc_processing"] = "cisco_grpc_dialout_pb2_grpc"
        grpcPeer["grpc_ulayer"] = "GPB Telemetry"

        jsonTelemetryNode = json.dumps(grpcPeer, indent=2, sort_keys=True)
        PMGRPCDLOG.debug("Cisco connection info: %s" % jsonTelemetryNode)

        for new_msg in msg_iterator:
            PMGRPCDLOG.debug("Cisco new_msg iteration message")
            # filter msgs that do not match the IP option if enabled.
            if lib_pmgrpcd.OPTIONS.ip:
                if grpcPeer["telemetry_node"] != lib_pmgrpcd.OPTIONS.ip:
                    continue
                PMGRPCDLOG.debug("Cisco: ip filter matched with ip %s"
                                 % (lib_pmgrpcd.OPTIONS.ip))
            try:
                cisco_processing(grpcPeer, new_msg)
            except Exception as e:
                PMGRPCDLOG.debug(
                    "Error processing Cisco packet, error is %s", e)
                continue
    except Exception as e:
        # Log through the daemon logger instead of the original bare print()s.
        PMGRPCDLOG.error("Error in MdtDialout: %s %s", type(e), e.args)
    return
    # Unreachable yield keeps this handler a generator, as gRPC expects.
    yield
def FinalizeTelemetryData(dictTelemetryData):
    """Stamp, optionally mitigate, dump and export one telemetry message.

    Args:
        dictTelemetryData: decoded telemetry message; mutated in place with a
            collection timestamp.

    Returns:
        The JSON-serialized (possibly mitigated) telemetry message.
    """
    # Adding epoch in milliseconds to identify this single metric on the way
    # to the storage.
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Going over the mitigation library, if needed.
    # TODO: Simplify the next part
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data

        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            jsonTelemetryData = json.dumps(dictTelemetryData_mod, indent=2,
                                           sort_keys=True)
        except Exception as e:
            # Fixed: the first log line had a %s placeholder with no argument
            # and printed a literal "%s".
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised a error:\n%s", e)
            PMGRPCDLOG.info("ERROR: %s" % (e))
            # Fall back to the unmitigated payload.
            dictTelemetryData_mod = dictTelemetryData
            jsonTelemetryData = json.dumps(dictTelemetryData, indent=2,
                                           sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        jsonTelemetryData = json.dumps(dictTelemetryData, indent=2,
                                       sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s" % (jsonTelemetryData))

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s"
                         % (lib_pmgrpcd.OPTIONS.jsondatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile, "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Filter only config.
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig" in dictTelemetryData_mod["collector"]["data"]
                    ["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData
def huawei_processing(grpcPeer, new_msg):
    """Decode one Huawei GPB telemetry message and finalize each data row.

    Args:
        grpcPeer: dict with peer metadata (telemetry_node, ne_vendor, ...).
        new_msg: raw gRPC message carrying the protobuf payload in .data.

    Raises:
        Re-raises any error while parsing or converting the Telemetry header.
    """
    PMGRPCDLOG.debug("Huawei: Received GRPC-Data")

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatafile:
        PMGRPCDLOG.debug("Write rawdatafile: %s"
                         % (lib_pmgrpcd.OPTIONS.rawdatafile))
        with open(lib_pmgrpcd.OPTIONS.rawdatafile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    try:
        telemetry_msg = huawei_telemetry_pb2.Telemetry()
        telemetry_msg.ParseFromString(new_msg.data)
    except Exception as e:
        PMGRPCDLOG.error(
            "instancing or parsing data failed with huawei_telemetry_pb2.Telemetry"
        )
        PMGRPCDLOG.error("ERROR: %s" % (e))
        raise

    try:
        telemetry_msg_dict = MessageToDict(
            telemetry_msg,
            including_default_value_fields=True,
            preserving_proto_field_name=True,
            use_integers_for_enums=True,
        )
    except Exception as e:
        # Fixed: this used to repeat the parse-error text and drop the
        # exception detail entirely.
        PMGRPCDLOG.error(
            "Converting the Telemetry header to a dict failed. ERROR: %s", e)
        raise

    PMGRPCDLOG.debug("Huawei: Received GPB-Data as JSON")
    # TODO: Do we really need this? it can be expensive
    PMGRPCDLOG.debug(json.dumps(telemetry_msg_dict, indent=2, sort_keys=True))

    # Header = everything except the row payload.
    message_header_dict = telemetry_msg_dict.copy()
    if "data_gpb" in message_header_dict:
        del message_header_dict["data_gpb"]

    (proto, path) = message_header_dict["sensor_path"].split(":")
    node_id_str = message_header_dict["node_id_str"]
    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]

    # Get the matching L3 method (proto-specific GPB class) for this proto.
    msg = select_gbp_methode(proto)
    if msg:
        elem = len(telemetry_msg.data_gpb.row)
        epochmillis = int(round(time.time() * 1000))
        PMGRPCDLOG.info(
            "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM:%s"
            % (epochmillis, node_ip, node_id_str, ne_vendor, proto, "GPB", elem))

        # L2: iterate over the rows of the GPB table.
        for new_row in telemetry_msg.data_gpb.row:
            # PMGRPCDLOG.info("NEW_ROW: %s" % (new_row))
            new_row_header_dict = MessageToDict(
                new_row,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )
            if "content" in new_row_header_dict:
                del new_row_header_dict["content"]

            # L3: decode the row payload with the proto-specific message.
            msg.ParseFromString(new_row.content)
            content = MessageToDict(
                msg,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )

            # Assemble the outgoing message: collector metadata + row content.
            message_dict = {}
            message_dict.update({
                "collector": {
                    "grpc": {
                        "grpcPeer": grpcPeer["telemetry_node"],
                        "ne_vendor": grpcPeer["ne_vendor"],
                    }
                }
            })
            message_dict["collector"].update(
                {"data": message_header_dict.copy()})
            message_dict["collector"]["data"].update(new_row_header_dict)
            message_dict.update(content)

            allkeys = parse_dict(content, ret="", level=0)
            PMGRPCDLOG.debug("Huawei: %s: %s" % (proto, allkeys))

            try:
                # Return value unused; FinalizeTelemetryData exports the metric.
                FinalizeTelemetryData(message_dict)
            except Exception as e:
                PMGRPCDLOG.error("Error finalazing message: %s", e)
def cisco_processing(grpcPeer, new_msg):
    """Decode one Cisco telemetry packet (JSON or GPB-KV) and finalize its msgs.

    Args:
        grpcPeer: dict with peer metadata (telemetry_node, ne_vendor, ...).
        new_msg: raw gRPC message carrying the payload in .data.

    Raises:
        Exception: when the packet encoding cannot be determined.
    """
    messages = {}
    grpc_message = {}
    encoding_type = None
    PMGRPCDLOG.debug("Cisco: Received GRPC-Data")
    PMGRPCDLOG.debug(new_msg.data)

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatadumpfile:
        # Fixed: the log line used to print OPTIONS.rawdatafile (wrong option).
        PMGRPCDLOG.debug("Write rawdatadumpfile: %s"
                         % (lib_pmgrpcd.OPTIONS.rawdatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.rawdatadumpfile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    # Find the encoding of the packet
    try:
        encoding_type, grpc_message = find_encoding_and_decode(new_msg)
    except Exception as e:
        PMGRPCDLOG.error("Error decoding packet. Error is {}".format(e))

    PMGRPCDLOG.debug("encoding_type is: %s\n" % (encoding_type))

    if (encoding_type == "unknown") or encoding_type is None:
        # Merged the original duplicated check; log via the daemon logger
        # instead of a bare print().
        PMGRPCDLOG.error("encoding_type is unknown.")
        raise Exception("Encoding type unknown")

    # Header = everything except the decoded payload.
    message_header_dict = grpc_message.copy()
    if "data_json" in message_header_dict:
        del message_header_dict["data_json"]
    PMGRPCDLOG.debug("Header:%s", message_header_dict)

    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]
    epochmillis = int(round(time.time() * 1000))

    if encoding_type == "ciscojson":
        message_header_dict.update({"encoding_type": encoding_type})
        (proto, path) = message_header_dict["encoding_path"].split(":")
        node_id_str = message_header_dict["node_id_str"]
        elem = len(grpc_message["data_json"])
        messages = grpc_message["data_json"]
    elif encoding_type == "ciscogrpckv":
        message_header_dict.update({"encoding_type": encoding_type})
        # Normalize camelCase keys from the KV decoder to snake_case.
        message_header_dict["encoding_path"] = message_header_dict.pop(
            "encodingPath")
        message_header_dict["node_id_str"] = message_header_dict.pop("nodeIdStr")
        message_header_dict["msg_timestamp"] = message_header_dict.pop(
            "msgTimestamp")
        message_header_dict["subscription_id_str"] = message_header_dict.pop(
            "subscriptionIdStr")

        # KV encoding paths do not always carry a "proto:" prefix.
        full_encoding_path = message_header_dict["encoding_path"]
        if ":" in full_encoding_path:
            (proto, path) = message_header_dict["encoding_path"].split(":")
        else:
            proto = None
            path = full_encoding_path
        node_id_str = message_header_dict["node_id_str"]
        if "dataGpbkv" in grpc_message:
            elem = len(grpc_message["dataGpbkv"])
            messages = grpc_message["dataGpbkv"]
        else:
            elem = 0
            messages = {}

    message_header_dict["path"] = path

    PMGRPCDLOG.info(
        "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM=%s",
        epochmillis,
        node_ip,
        node_id_str,
        ne_vendor,
        proto,
        encoding_type,
        elem,
    )

    # A single telemetry packet can contain multiple msgs (each having their
    # own key/values). Here we are processing them one by one.
    for listelem in messages:
        # Copy the necessary metadata to the packet.
        PMGRPCDLOG.debug("LISTELEM: %s", listelem)

        message_dict = {}
        message_dict.update({"collector": {"grpc": {}}})
        message_dict["collector"]["grpc"].update(
            {"grpcPeer": grpcPeer["telemetry_node"]})
        message_dict["collector"]["grpc"].update(
            {"ne_vendor": grpcPeer["ne_vendor"]})
        message_dict["collector"].update({"data": message_header_dict})

        if encoding_type == "ciscojson":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["content"])
            message_dict.update({path: listelem["content"]})
        elif encoding_type == "ciscogrpckv":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["fields"])
            message_dict.update({path: listelem["fields"]})

        # allkeys = parse_dict(listelem, ret='', level=0)
        # PMGRPCDLOG.info("Cisco: %s: %s" % (proto, allkeys))

        try:
            # Return value unused; FinalizeTelemetryData exports the metric.
            FinalizeTelemetryData(message_dict)
        except Exception as e:
            PMGRPCDLOG.error("Error finalazing message: %s", e)
def find_encoding_and_decode(new_msg):
    """Decode a Cisco packet according to the configured encoding (cenctype).

    Returns:
        (encoding_type, decoded_message) — encoding_type is None when decoding
        failed or no branch matched, "unknown" for the unimplemented compact
        mode; decoded_message is {} in those cases.
    """
    cenctype = lib_pmgrpcd.OPTIONS.cenctype
    # TODO. If options force one type, only try that one.

    if cenctype == 'json':
        # Maybe it is json
        PMGRPCDLOG.debug("Try to parse json")
        try:
            decoded = json.loads(new_msg.data)
        except Exception as e:
            PMGRPCDLOG.debug(
                "ERROR: Direct json parsing of grpc_message failed with message:\n%s\n",
                e
            )
        else:
            return "ciscojson", decoded
    elif cenctype == 'gpbkv':
        PMGRPCDLOG.debug("Try to unmarshall KV")
        try:
            decoded = process_cisco_kv(new_msg)
        except Exception as e:
            PMGRPCDLOG.debug(
                "ERROR: Parsing of json after unmarshall KV failed with message:\n%s\n",
                e,
            )
        else:
            return "ciscogrpckv", decoded
    elif cenctype == 'gpbcomp':
        PMGRPCDLOG.debug("Try to unmarshall compact mode")
        PMGRPCDLOG.debug("TODO")
        return "unknown", {}

    # Decoding failed or no branch matched.
    return None, {}
def FinalizeTelemetryData(dictTelemetryData):
    """Stamp, optionally mitigate, transform, dump and export one telemetry message.

    Variant with the experimental TRANSFORMATION path for Cisco GPB-KV data.
    Returns the JSON-serialized (possibly mitigated) telemetry message.
    """
    # Adding epoch in millisecond to identify this singel metric on the way to the storage
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Going over the mitigation library, if needed.
    # TODO: Simplify the next part
    dictTelemetryData_beforeencoding = None
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data

        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            dictTelemetryData_beforeencoding = dictTelemetryData_mod
            jsonTelemetryData = json.dumps(dictTelemetryData_mod, indent=2,
                                           sort_keys=True)
        except Exception as e:
            # NOTE(review): the first log line has a %s placeholder with no
            # argument, so it prints a literal "%s" — likely a latent bug.
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised a error:\n%s")
            PMGRPCDLOG.info("ERROR: %s" % (e))
            # Fall back to the unmitigated payload on mitigation failure.
            dictTelemetryData_mod = dictTelemetryData
            dictTelemetryData_beforeencoding = dictTelemetryData
            jsonTelemetryData = json.dumps(dictTelemetryData, indent=2,
                                           sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        dictTelemetryData_beforeencoding = dictTelemetryData
        jsonTelemetryData = json.dumps(dictTelemetryData, indent=2,
                                       sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s" % (jsonTelemetryData))

    # Check if we need to transform. This will change later
    #breakpoint() if get_lock() else None
    # "path" is set by the caller (cisco_processing); a missing key raises.
    path = dictTelemetryData_beforeencoding["collector"]["data"]["path"]
    actual_data = dictTelemetryData_beforeencoding.get(path, {})
    #if path == "sys/intf":
    #    return
    # NOTE(review): debug print left in; presumably temporary instrumentation.
    print(path)
    #breakpoint() if get_lock() else None

    # Experimental transformation branch: only for KV-encoded data and only
    # when a TRANSFORMATION pipeline is configured. Short-circuits the normal
    # example/dump/export flow below.
    if TRANSFORMATION and dictTelemetryData_beforeencoding and "dataGpbkv" in dictTelemetryData_beforeencoding.get(
            "collector", {}).get("data", {}):
        data = dictTelemetryData_beforeencoding["collector"]["data"].copy()
        data["dataGpbkv"] = [{"fields": actual_data}]
        # we just transform for kv
        metric = CiscoKVFlatten.build_from_dcit(data)
        internals = list(metric.get_internal())
        #breakpoint() if get_lock() else None
        for internal in internals:
            for new_metric in TRANSFORMATION.transform(internal):
                # NOTE(review): prints the bound method object, not keys() —
                # looks like leftover debug output; confirm before relying on it.
                print(new_metric.keys)
                data = new_metric.data
                data["dataGpbkv"] = new_metric.content
                # Each transformed metric is exported individually.
                export_metrics(json.dumps({"collector": {"data": data}}))
        #breakpoint() if get_lock() else None
        return jsonTelemetryData
    #breakpoint() if get_lock() else None

    # Optionally persist an example payload per peer/encoding path.
    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    # Optionally append the JSON payload to a dump file.
    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s"
                         % (lib_pmgrpcd.OPTIONS.jsondatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile, "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Filter only config.
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig" in dictTelemetryData_mod["collector"]["data"]
                    ["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData