def FinalizeTelemetryData(dictTelemetryData):
    # Add an epoch timestamp in milliseconds to identify this single metric
    # on its way to the storage.
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Run the data through the mitigation library, if enabled.
    # TODO: Simplify the next part
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data

        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            jsonTelemetryData = json.dumps(
                dictTelemetryData_mod, indent=2, sort_keys=True)
        except Exception as e:
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised an error:")
            PMGRPCDLOG.info("ERROR: %s", e)
            dictTelemetryData_mod = dictTelemetryData
            jsonTelemetryData = json.dumps(
                dictTelemetryData, indent=2, sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        jsonTelemetryData = json.dumps(
            dictTelemetryData, indent=2, sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s" % (jsonTelemetryData))

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s" %
                         (lib_pmgrpcd.OPTIONS.jsondatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile, "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Filter: optionally export only openconfig encoding paths.
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig" in
                    dictTelemetryData_mod["collector"]["data"]["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData
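# Expected input shape (illustrative, derived from the producer functions
# below): the dict must at least carry
# {"collector": {"grpc": {...}, "data": {...}}}; the vendor-specific payload
# sits next to "collector".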
def dataPublish(self, message, context):
    grpcPeer = {}
    grpcPeerStr = context.peer()
    # Example of grpcPeerStr -> 'ipv4:10.215.133.23:57775'
    (
        grpcPeer["telemetry_proto"],
        grpcPeer["telemetry_node"],
        grpcPeer["telemetry_node_port"],
    ) = grpcPeerStr.split(":")
    grpcPeer["ne_vendor"] = "Huawei"
    PMGRPCDLOG.debug("Huawei MdtDialout Message: %s" % grpcPeer["telemetry_node"])

    metadata = dict(context.invocation_metadata())
    grpcPeer["user-agent"] = metadata["user-agent"]
    grpcPeer["grpc_processing"] = "huawei_grpc_dialout_pb2_grpc"
    grpcPeer["grpc_ulayer"] = "GPB Telemetry"
    jsonTelemetryNode = json.dumps(grpcPeer, indent=2, sort_keys=True)
    PMGRPCDLOG.debug("Huawei RAW Message: %s" % jsonTelemetryNode)

    for new_msg in message:
        PMGRPCDLOG.debug("Huawei new_msg iteration message")
        # Filter out msgs that do not match the IP option, if enabled.
        if lib_pmgrpcd.OPTIONS.ip:
            if grpcPeer["telemetry_node"] != lib_pmgrpcd.OPTIONS.ip:
                continue
            PMGRPCDLOG.debug("Huawei: ip filter matched with ip %s" %
                             (lib_pmgrpcd.OPTIONS.ip))
        try:
            huawei_processing(grpcPeer, new_msg)
        except Exception as e:
            PMGRPCDLOG.debug("Error processing Huawei packet, error is %s", e)
            continue

    # The bare yield after return keeps this method a generator (an empty
    # response stream), as the gRPC streaming servicer interface expects.
    return
    yield
def export_metrics(datajsonstring):
    for exporter in EXPORTERS:
        try:
            EXPORTERS[exporter].process_metric(datajsonstring)
        except Exception as e:
            PMGRPCDLOG.debug(
                "Error processing packet on exporter %s. Error was %s",
                exporter, e)
            raise
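# A minimal exporter sketch (hypothetical, for illustration only): anything
# registered in EXPORTERS merely needs a process_metric() method that accepts
# the serialized JSON string.
class StdoutExporter:
    """Hypothetical exporter that prints each metric to stdout."""

    def process_metric(self, datajsonstring):
        print(datajsonstring)


# Assumed registration, e.g. during configure():
# EXPORTERS["stdout"] = StdoutExporter()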
def add_option(self, *arg, **kargs):
    envvar = kargs.pop("env_name", None)
    can_be_none = kargs.pop("can_be_none", False)

    if envvar is not None:
        new_help = kargs.get("help", "")
        new_help = new_help + " [Env variable {}]".format(envvar)
        kargs["help"] = new_help
        # Override the default with the value of the env variable, if set.
        if envvar in os.environ:
            PMGRPCDLOG.debug(
                "Getting data for %s from the env variable %s", arg[0], envvar)
            kargs["default"] = os.environ[envvar]

    if not can_be_none and "default" in kargs and kargs["default"] is None:
        raise Exception("Parameter with env %s is None" % envvar)

    super().add_option(*arg, **kargs)
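# Hypothetical usage sketch: with env_name set, an exported environment
# variable overrides the coded default, and the help text documents it.
#
#   parser.add_option(
#       "-T", "--topic",
#       env_name="PM_TOPIC",   # PM_TOPIC=mytopic overrides the default
#       default=None,
#       can_be_none=True,      # allow the value to stay unset
#       help="the json data are serialized to this topic",
#   )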
def examples(dictTelemetryData_mod, jsonTelemetryData):
    global example_dict
    if dictTelemetryData_mod["collector"]["grpc"]["grpcPeer"]:
        grpcPeer = dictTelemetryData_mod["collector"]["grpc"]["grpcPeer"]
        if dictTelemetryData_mod["collector"]["grpc"]["ne_vendor"]:
            ne_vendor = dictTelemetryData_mod["collector"]["grpc"]["ne_vendor"]
            if dictTelemetryData_mod["collector"]["data"]["encoding_path"]:
                encoding_path = dictTelemetryData_mod["collector"]["data"][
                    "encoding_path"]
                PMGRPCDLOG.debug(
                    "IN EXAMPLES: grpcPeer=%s ne_vendor=%s encoding_path=%s" %
                    (grpcPeer, ne_vendor, encoding_path))

                try:
                    if not os.path.exists(lib_pmgrpcd.OPTIONS.examplepath):
                        os.makedirs(lib_pmgrpcd.OPTIONS.examplepath)
                except OSError:
                    pass

                if grpcPeer not in example_dict:
                    example_dict.update({grpcPeer: []})
                if encoding_path not in example_dict[grpcPeer]:
                    example_dict[grpcPeer].append(encoding_path)
                    encoding_path_mod = encoding_path.replace(":", "_").replace(
                        "/", "-")
                    exafilename = (grpcPeer + "_" + ne_vendor + "_" +
                                   encoding_path_mod + ".json")
                    exapathfile = os.path.join(lib_pmgrpcd.OPTIONS.examplepath,
                                               exafilename)
                    with open(exapathfile, "w") as exafile:
                        # exafile.write("PROTOPATH[" + telemetry_node + "]: " + protopath + "\n")
                        exafile.write(jsonTelemetryData)
                        exafile.write("\n")
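# For example (illustrative values): a peer 10.215.133.23 sending vendor
# "Cisco" data for encoding path "openconfig-interfaces:interfaces" is dumped
# once per daemon run to:
#
#   <examplepath>/10.215.133.23_Cisco_openconfig-interfaces_interfaces.json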
def add_option(self, *arg, **kargs):
    envvar = kargs.pop("env_name", None)
    required = kargs.pop("required", False)

    # Decorate the help text with the config key, the coded default and,
    # later, the env variable.
    new_help = kargs.get("help", "")
    dest = kargs.get("dest", None)
    if dest is not None:
        new_help = new_help + f" [Configkey {dest}]"

    code_default = kargs.get("default", None)
    if code_default is not None:
        new_help = new_help + f" [Default {code_default}]"
    else:
        if required:
            new_help = new_help + " [Mandatory]"

    if dest is not None and dest in self.config:
        PMGRPCDLOG.debug(
            "Getting data for %s from the config key %s", arg[0], dest)
        kargs["default"] = self.config[dest]

    if envvar is not None:
        new_help = new_help + " [Env {}]".format(envvar)
        # Override the default with the value of the env variable, if set.
        if envvar in os.environ:
            PMGRPCDLOG.debug(
                "Getting data for %s from the env variable %s", arg[0], envvar)
            kargs["default"] = os.environ[envvar]

    kargs["help"] = new_help

    # Normalize string defaults to booleans when the action is store_true.
    action = kargs.get("action", "")
    if action == "store_true":
        if "default" in kargs and not isinstance(kargs["default"], bool):
            kargs["default"] = bool(strtobool(kargs["default"]))
        if "default" not in kargs:
            kargs["default"] = False

    option = super().add_option(*arg, **kargs)
    option.required = required
    return option
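# Hypothetical usage sketch: the decorated help string documents the config
# key, the coded default and the environment override, e.g.
#
#   parser.add_option(
#       "-w", "--workers",
#       dest="workers",
#       default=20,
#       env_name="PM_WORKERS",
#       help="number of worker processes",
#   )
#   # --help then shows:
#   #   number of worker processes [Configkey workers] [Default 20] [Env PM_WORKERS]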
def manually_serialize(self):
    PMGRPCDLOG.info(
        "manually serialize with avscid (%s) and jsondatafile (%s)" %
        (lib_pmgrpcd.OPTIONS.avscid, lib_pmgrpcd.OPTIONS.jsondatafile))
    avscid = int(lib_pmgrpcd.OPTIONS.avscid)
    avroinstance = self.getavro_schid_instance(avscid)
    with open(lib_pmgrpcd.OPTIONS.jsondatafile, "r") as jsondatahandler:
        jsondata = json.load(jsondatahandler)
    self.serialize(jsondata, lib_pmgrpcd.OPTIONS.topic, avscid, avroinstance)
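# Illustrative invocation (option names taken from main(); for development):
# serialize one JSON data file against Avro schema id 42 and send it to the
# configured topic:
#
#   pmgrpcd.py -A 42 -J /tmp/sample.json -T mytopic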
def __init__(self, channel):
    self.channel = channel
    self.stub = gnmi_pb2_grpc.gNMIStub(self.channel)

    # ask for the capabilities
    #cap_req = gnmi_pb2.CapabilityRequest()
    #cap_res = self.stub.Capabilities(cap_req)

    self.encapsulation = gnmi_pb2.PROTO

    encoding_path = "/interfaces"
    path = gnmi_utils.simple_gnmi_string_parser(encoding_path)
    mysub = gnmi_pb2.Subscription(path=path,
                                  sample_interval=60 * 1000000000)
    mysubs = [mysub]
    mysblist = gnmi_pb2.SubscriptionList(prefix=None,
                                         encoding=self.encapsulation,
                                         subscription=mysubs)
    mysubreq = gnmi_pb2.SubscribeRequest(subscribe=mysblist)

    # The Subscribe RPC expects a request iterator, so wrap the single
    # request in a generator.
    def x():
        yield mysubreq

    y = x()
    base_grpc = {"grpcPeer": self.channel._channel.target().decode(),
                 "ne_vendor": "gnmi"}
    msgs = self.stub.Subscribe(y, None)
    for msg in msgs:
        if msg.HasField('update'):
            grpc = dict(base_grpc)
            # TODO: the node id is hardcoded for development.
            data = {"node_id_str": "r33.labxtx01.us.bb"}
            notification = msg.update
            timestamp = notification.timestamp  # in nanoseconds since epoch
            prefix = notification.prefix
            sensor_path, keys = gnmi_utils.gnmi_to_string_and_keys(prefix)
            data["encoding_path"] = sensor_path
            data["collection_timestamp"] = timestamp / 1000
            data["keys"] = keys
            gnmi = []
            header_info = None
            for upd in notification.update:
                upd_name, extra_keys = gnmi_utils.gnmi_to_string_and_keys(upd.path)
                try:
                    value = getattr(upd.val, upd.val.WhichOneof("value"))
                except Exception:
                    # Development aid: inspect unexpected values.
                    breakpoint()
                if upd.val.WhichOneof("value") in ("leaflist_val", "any_val",
                                                   "decimal_val"):
                    value = str(value)
                if upd_name == "__juniper_telemetry_header__":
                    header_bytes = value
                    continue
                if extra_keys:
                    # Development aid: keyed leaves are not handled yet.
                    breakpoint()
                gnmi.append({"keys": extra_keys, "name": upd_name,
                             "value": value})
            data["gnmi"] = gnmi
            message_dict = {"collector": {"grpc": grpc, "data": data}}
            try:
                returned = FinalizeTelemetryData(message_dict)
            except Exception as e:
                PMGRPCDLOG.error("Error finalizing message: %s", e)
def get_gpbmapfile():
    global MAP_DICT
    if MAP_DICT is None:
        with open(lib_pmgrpcd.OPTIONS.gpbmapfile, "r") as file:
            MAP_DICT = {}
            for line in file:
                # e.g. huawei-ifm = huawei_ifm_pb2.Ifm()
                (k, v) = line.split("=")
                MAP_DICT.update({k.strip(): v.strip()})
        PMGRPCDLOG.debug("MAP_DICT: %s", MAP_DICT)
    return MAP_DICT
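# Illustrative gpbmapfile contents (format from the parser above: one
# "proto = GPB constructor" pair per line; the second entry is a made-up
# example):
#
#   huawei-ifm = huawei_ifm_pb2.Ifm()
#   huawei-devm = huawei_devm_pb2.Devm()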
def MdtDialout(self, msg_iterator, context):
    try:
        grpcPeer = {}
        grpcPeerStr = context.peer()
        # Example of grpcPeerStr -> 'ipv4:10.215.133.23:57775'
        (
            grpcPeer["telemetry_proto"],
            grpcPeer["telemetry_node"],
            grpcPeer["telemetry_node_port"],
        ) = grpcPeerStr.split(":")
        grpcPeer["ne_vendor"] = "Cisco"
        PMGRPCDLOG.debug("Cisco MdtDialout Message: %s" %
                         grpcPeer["telemetry_node"])

        # cisco_processing(grpcPeer, message, context)
        metadata = dict(context.invocation_metadata())
        grpcPeer["user-agent"] = metadata["user-agent"]
        grpcPeer["grpc_processing"] = "cisco_grpc_dialout_pb2_grpc"
        grpcPeer["grpc_ulayer"] = "GPB Telemetry"
        jsonTelemetryNode = json.dumps(grpcPeer, indent=2, sort_keys=True)
        PMGRPCDLOG.debug("Cisco connection info: %s" % jsonTelemetryNode)

        for new_msg in msg_iterator:
            PMGRPCDLOG.debug("Cisco new_msg iteration message")
            # Filter out msgs that do not match the IP option, if enabled.
            if lib_pmgrpcd.OPTIONS.ip:
                if grpcPeer["telemetry_node"] != lib_pmgrpcd.OPTIONS.ip:
                    continue
                PMGRPCDLOG.debug("Cisco: ip filter matched with ip %s" %
                                 (lib_pmgrpcd.OPTIONS.ip))
            try:
                cisco_processing(grpcPeer, new_msg)
            except Exception as e:
                PMGRPCDLOG.debug(
                    "Error processing Cisco packet, error is %s", e)
                continue
    except Exception as e:
        PMGRPCDLOG.error("Error in MdtDialout: %s %s", type(e), e.args)

    # The bare yield after return keeps this method a generator (an empty
    # response stream), as the gRPC streaming servicer interface expects.
    return
    yield
def select_gbp_methode(proto):
    try:
        map_dict = get_gpbmapfile()
    except Exception:
        PMGRPCDLOG.error("Error getting the map dict")
        raise

    if proto in map_dict:
        PMGRPCDLOG.debug("I FOUND THE GPB (%s) FOR PROTO (%s)" %
                         (map_dict[proto], proto))
        # TODO: I am pretty sure we can do something better than this.
        msg = eval(map_dict[proto])
        return msg
    else:
        PMGRPCDLOG.debug("MISSING GPB method for PROTO: %s", proto)
        lib_pmgrpcd.MISSGPBLIB.update({proto: str(datetime.now())})
        return False
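# A possible safer alternative for the TODO above (a sketch, not the module's
# current implementation): resolve "module_pb2.Classname()" strings explicitly
# via importlib instead of eval(), so only attributes of importable modules
# can be instantiated. The helper name is hypothetical.
import importlib


def resolve_gpb_constructor(spec):
    """Instantiate e.g. 'huawei_ifm_pb2.Ifm()' without eval()."""
    modname, classcall = spec.split(".", 1)
    classname = classcall.rstrip("()")
    module = importlib.import_module(modname)
    return getattr(module, classname)()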
def main():
    global CONFIGFILE
    usage_str = "%prog [options]"
    version_str = "%prog " + SCRIPTVERSION

    # Scan the raw arguments very simply, just to obtain the config file if
    # one was given.
    config_file_flag = "-c"
    extra_argv = sys.argv[1:]
    config_file_args = None
    if extra_argv:
        if config_file_flag in extra_argv:
            index = extra_argv.index(config_file_flag)
            file_index = index + 1
            try:
                config_file_args = extra_argv[file_index]
            except IndexError:
                pass
    if config_file_args is not None:
        CONFIGFILE = config_file_args

    # Load config, and make sure the other files exist.
    config = configparser.ConfigParser()
    if os.path.isfile(CONFIGFILE):
        config.read(CONFIGFILE)
        if "PMGRPCD" not in config.sections():
            raise FileNotFound(
                "There is no PMGRPCD section in the configuration file")
    else:
        raise FileNotFound(
            "We could not find configuration file in {}".format(CONFIGFILE))

    # Parse arguments. Default must be a named argument!
    parser = OptionParserEnv(usage=usage_str, version=version_str)
    # The next one is not really used, but important to avoid errors.
    parser.add_option(
        config_file_flag,
        default=str(DEFAULT_CONFIGFILE),
        dest="configuration",
        help="Path to configuration file",
    )

    # gnmi options
    #parser.add_option(
    #    "-g",
    #    "--gnmi_enable",
    #    default=config.getboolean("PMGRPCD", "gnmi_enable", fallback=False),
    #    help="Boolean defining whether gnmi is enabled (this disables the rest of the collectors)",
    #)
    #parser.add_option(
    #    "--gnmi_target",
    #    env_name="GNMI_SERVER",
    #    default=config.get("PMGRPCD", "gnmi_target", fallback=None),
    #    help="The url of the gnmi target",
    #)
    parser.add_option(
        "-T",
        "--topic",
        env_name="PM_TOPIC",
        default=config.get("PMGRPCD", "topic", fallback=None),
        dest="topic",
        help="the json data are serialized to this topic",
    )
    parser.add_option(
        "-B",
        "--bsservers",
        default=config.get("PMGRPCD", "bsservers", fallback=None),
        env_name="BSSERVERS",
        dest="bsservers",
        help="bootstrap servers url with port to reach kafka",
    )
    parser.add_option(
        "-S",
        "--secproto",
        default=config.get("PMGRPCD", "secproto", fallback="ssl"),
        dest="secproto",
        help="security protocol (normally ssl)",
    )
    parser.add_option(
        "-O",
        "--sslcertloc",
        env_name="SSLCERTLOC",
        default=config.get("PMGRPCD", "sslcertloc", fallback=None),
        dest="sslcertloc",
        help="path/file to ssl certification location",
    )
    parser.add_option(
        "-K",
        "--sslkeyloc",
        env_name="SSLKEYLOC",
        default=config.get("PMGRPCD", "sslkeyloc", fallback=None),
        dest="sslkeyloc",
        help="path/file to ssl key location",
    )
    parser.add_option(
        "-U",
        "--urlscreg",
        env_name="URLSCREG",
        default=config.get("PMGRPCD", "urlscreg", fallback=None),
        dest="urlscreg",
        help="the url to the schema-registry",
    )
    parser.add_option(
        "-L",
        "--calocation",
        env_name="CALOCATION",
        default=config.get("PMGRPCD", "calocation", fallback=None),
        dest="calocation",
        help="the ca_location used to connect to schema-registry",
    )
    parser.add_option(
        "-G",
        "--gpbmapfile",
        env_name="GPBMAPFILE",
        default=config.get("PMGRPCD", "gpbmapfile", fallback=None),
        dest="gpbmapfile",
        help="change path/name of gpbmapfile [default: %default]",
    )
    parser.add_option(
        "-M",
        "--avscmapfile",
        env_name="AVSCMALFILE",
        default=config.get("PMGRPCD", "avscmapfile", fallback=None),
        dest="avscmapfile",
        help="path/name to the avscmapfile",
    )
    parser.add_option(
        "-m",
        "--mitigation",
        action="store_true",
        default=config.getboolean("PMGRPCD", "mitigation"),
        dest="mitigation",
        help="enable plugin mitigation mod_result_dict from python module mitigation.py",
    )
    parser.add_option(
        "-d",
        "--debug",
        action="store_true",
        default=config.getboolean("PMGRPCD", "debug"),
        dest="debug",
        help="enable debug messages on the logfile",
    )
    parser.add_option(
        "-l",
        "--PMGRPCDLOGfile",
        default=config.get("PMGRPCD", "PMGRPCDLOGfile"),
        dest="PMGRPCDLOGfile",
        help="PMGRPCDLOGfile is the logfile on the collector side with path/name [default: %default]",
    )
    parser.add_option(
        "-a",
        "--serializelogfile",
        default=config.get("PMGRPCD", "serializelogfile"),
        dest="serializelogfile",
        help="serializelogfile with path/name for kafka avro and zmq messages [default: %default]",
    )
    parser.add_option(
        "-I",
        "--ipport",
        action="store",
        type="string",
        default=config.get("PMGRPCD", "ipport"),
        dest="ipport",
        help="change the ipport the daemon is listening on [default: %default]",
    )
    parser.add_option(
        "-w",
        "--workers",
        action="store",
        type="int",
        default=config.get("PMGRPCD", "workers"),
        dest="workers",
        help="change the number of parallel worker processes [default: %default]",
    )
    parser.add_option(
        "-C",
        "--cisco",
        action="store_true",
        default=config.getboolean("PMGRPCD", "cisco"),
        dest="cisco",
        help="enable the grpc messages coming from Cisco [default: %default]",
    )
    parser.add_option(
        "-H",
        "--huawei",
        action="store_true",
        default=config.getboolean("PMGRPCD", "huawei"),
        dest="huawei",
        help="enable the grpc messages coming from Huawei [default: %default]",
    )
    parser.add_option(
        "-t",
        "--cenctype",
        action="store",
        type="string",
        default=config.get("PMGRPCD", "cenctype"),
        dest="cenctype",
        help="cenctype is the type of encoding for cisco. This is because some protofiles are incompatible. With cenctype=gpbkv only cisco is enabled. The encoding type can be json, gpbcomp, gpbkv [default: %default]",
    )
    parser.add_option(
        "-e",
        "--example",
        action="store_true",
        default=config.getboolean("PMGRPCD", "example"),
        dest="example",
        help="enable writing example Json-Data-Files [default: %default]",
    )
    parser.add_option(
        "-E",
        "--examplepath",
        default=config.get("PMGRPCD", "examplepath"),
        dest="examplepath",
        help="dump a json example of each proto/path to this examplepath",
    )
    parser.add_option(
        "-j",
        "--jsondatadumpfile",
        dest="jsondatadumpfile",
        help="writing the output to the jsondatadumpfile path/name",
    )
    parser.add_option(
        "-r",
        "--rawdatadumpfile",
        default=config.get("PMGRPCD", "rawdatadumpfile", fallback=None),
        dest="rawdatadumpfile",
        help="writing the raw data from the routers to the rawdatadumpfile path/name",
    )
    parser.add_option(
        "-z",
        "--zmq",
        action="store_true",
        default=config.getboolean("PMGRPCD", "zmq"),
        dest="zmq",
        help="enable forwarding to ZMQ [default: %default]",
    )
    parser.add_option(
        "-p",
        "--zmqipport",
        default=config.get("PMGRPCD", "zmqipport"),
        dest="zmqipport",
        help="define proto://ip:port of zmq socket bind [default: %default]",
    )
    parser.add_option(
        "-k",
        "--kafkaavro",
        action="store_true",
        default=config.getboolean("PMGRPCD", "kafkaavro"),
        dest="kafkaavro",
        help="enable forwarding to Kafka kafkaavro (with schema-registry) [default: %default]",
    )
    parser.add_option(
        "-o",
        "--onlyopenconfig",
        action="store_true",
        default=config.getboolean("PMGRPCD", "onlyopenconfig"),
        dest="onlyopenconfig",
        help="only accept packets of openconfig",
    )
    parser.add_option(
        "-i",
        "--ip",
        dest="ip",
        help="only accept packets of this single ip",
    )
    parser.add_option(
        "-A",
        "--avscid",
        dest="avscid",
        help="this is to serialize manually with avscid and jsondatafile (for development)",
    )
    parser.add_option(
        "-J",
        "--jsondatafile",
        dest="jsondatafile",
        help="this is to serialize manually with avscid and jsondatafile (for development)",
    )
    parser.add_option(
        "-R",
        "--rawdatafile",
        dest="rawdatafile",
        help="this is to manually process (via mitigation) a rawdatafile with a single rawrecord (for development)",
    )
    parser.add_option(
        "-N",
        "--console",
        action="store_true",
        dest="console",
        help="this is to display all log-messages also on console (for development)",
    )
    parser.add_option(
        "-v",
        action="store_true",
        dest="version",
        help="print version of this script",
    )
    parser.add_option(
        "-s",
        "--kafkasimple",
        default=config.getboolean("PMGRPCD", "kafkasimple", fallback=False),
        dest="kafkasimple",
        help="Boolean if kafkasimple should be enabled.",
    )
    parser.add_option(
        "--file_exporter_file",
        default=config.get("PMGRPCD", "file_exporter_file", fallback=None),
        dest="file_exporter_file",
        help="Name of file for file exporter.",
    )
    parser.add_option(
        "--file_importer_file",
        default=config.get("PMGRPCD", "file_importer_file", fallback=None),
        dest="file_importer_file",
        help="Name of the file to import. If set, we will ignore the rest of the importers.",
    )

    (lib_pmgrpcd.OPTIONS, args) = parser.parse_args()

    init_pmgrpcdlog()
    init_serializelog()

    if lib_pmgrpcd.OPTIONS.version:
        print(parser.get_version())
        raise SystemExit

    PMGRPCDLOG.info("Using %s as config file", CONFIGFILE)
    PMGRPCDLOG.info("startoptions of this script: %s", str(lib_pmgrpcd.OPTIONS))

    # Test-Statements Logging
    # -----------------------
    # PMGRPCDLOG.debug('debug message')
    # PMGRPCDLOG.info('info message')
    # PMGRPCDLOG.warning('warn message')
    # PMGRPCDLOG.error('error message')
    # PMGRPCDLOG.critical('critical message')
    # serializelog.debug('debug message')
    # serializelog.info('info message')
    # serializelog.warning('warn message')
    # serializelog.error('error message')
    # serializelog.critical('critical message')

    configure()

    PMGRPCDLOG.info("enable listening to SIGNAL USR1 with Signalhandler")
    signal.signal(signal.SIGUSR1, signalhandler)
    PMGRPCDLOG.info("enable listening to SIGNAL USR2 with Signalhandler")
    signal.signal(signal.SIGUSR2, signalhandler)

    # I am going to comment the manual export of data for now, this could go
    # into another script.
    if lib_pmgrpcd.OPTIONS.avscid and lib_pmgrpcd.OPTIONS.jsondatafile:
        manually_serialize()
    elif lib_pmgrpcd.OPTIONS.file_importer_file:
        file_importer = FileInput(lib_pmgrpcd.OPTIONS.file_importer_file)
        PMGRPCDLOG.info("Starting file import")
        file_importer.generate()
        PMGRPCDLOG.info("No more data, sleeping 3 secs")
        time.sleep(3)
        PMGRPCDLOG.info("Finalizing file import")
    elif lib_pmgrpcd.OPTIONS.avscid or lib_pmgrpcd.OPTIONS.jsondatafile:
        PMGRPCDLOG.info(
            "manually serialize needs both options avscid and jsondatafile")
        parser.print_help()
    #elif lib_pmgrpcd.OPTIONS.gnmi_enable:
    #    if lib_pmgrpcd.OPTIONS.gnmi_target is None:
    #        error = "gnmi target not configured, but gnmi enabled"
    #        PMGRPCDLOG.error(error)
    #        raise Exception(error)
    #
    #    PMGRPCDLOG.info("Starting contact with gnmi server %s. Other functions will be ignored", lib_pmgrpcd.OPTIONS.gnmi_target)
    #    channel = grpc.insecure_channel(lib_pmgrpcd.OPTIONS.gnmi_target)
    #    gnmi_client = GNMIClient(channel)
    #    breakpoint()
    else:
        # Make sure some important files exist.
        if not os.path.isfile(lib_pmgrpcd.OPTIONS.gpbmapfile):
            raise FileNotFound("No gpbmapfile file found in {}".format(
                lib_pmgrpcd.OPTIONS.gpbmapfile))
        # TODO: Do we really need this always?
        if not os.path.isfile(lib_pmgrpcd.OPTIONS.avscmapfile):
            raise FileNotFound("No avscmapfile file found in {}".format(
                lib_pmgrpcd.OPTIONS.avscmapfile))
        PMGRPCDLOG.info("pmgrpcd.py is started at %s", str(datetime.now()))
        serve()
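# Illustrative minimal configuration file for the options above (section name
# from main(); keys match the option dests; all values are examples only):
#
#   [PMGRPCD]
#   topic = telemetry.avro
#   bsservers = kafka.example.net:9093
#   secproto = ssl
#   gpbmapfile = /etc/pmgrpcd/gpbmapfile.map
#   avscmapfile = /etc/pmgrpcd/avscmapfile.map
#   mitigation = False
#   debug = False
#   PMGRPCDLOGfile = /var/log/pmgrpcd.log
#   serializelogfile = /var/log/pmgrpcd_serialize.log
#   ipport = [::]:10000
#   workers = 20
#   cisco = True
#   huawei = True
#   cenctype = gpbkv
#   example = False
#   examplepath = /tmp/stexamples
#   zmq = False
#   zmqipport = tcp://127.0.0.1:50000
#   kafkaavro = False
#   onlyopenconfig = False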
def serve():
    gRPCserver = grpc.server(
        futures.ThreadPoolExecutor(max_workers=lib_pmgrpcd.OPTIONS.workers))

    if lib_pmgrpcd.OPTIONS.huawei:
        if lib_pmgrpcd.OPTIONS.cenctype == 'gpbkv':
            PMGRPCDLOG.info("Huawei is disabled because cenctype=gpbkv")
        else:
            PMGRPCDLOG.info("Huawei is enabled")
            # Ugly, but we have to import just here: importing the cisco and
            # huawei protos together at module level raises an exception due
            # to a conflict between them.
            from huawei_pmgrpcd import gRPCDataserviceServicer
            huawei_grpc_dialout_pb2_grpc.add_gRPCDataserviceServicer_to_server(
                gRPCDataserviceServicer(), gRPCserver)
    else:
        PMGRPCDLOG.info("Huawei is disabled")

    if lib_pmgrpcd.OPTIONS.cisco:
        PMGRPCDLOG.info("Cisco is enabled")
        # Same import conflict as above, so import just here.
        from cisco_pmgrpcd import gRPCMdtDialoutServicer
        cisco_grpc_dialout_pb2_grpc.add_gRPCMdtDialoutServicer_to_server(
            gRPCMdtDialoutServicer(), gRPCserver)
    else:
        PMGRPCDLOG.info("Cisco is disabled")

    gRPCserver.add_insecure_port(lib_pmgrpcd.OPTIONS.ipport)
    gRPCserver.start()

    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        gRPCserver.stop(0)
        PMGRPCDLOG.info("Stopping server")
        time.sleep(1)
def huawei_processing(grpcPeer, new_msg):
    PMGRPCDLOG.debug("Huawei: Received GRPC-Data")

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatafile:
        PMGRPCDLOG.debug("Write rawdatafile: %s" %
                         (lib_pmgrpcd.OPTIONS.rawdatafile))
        with open(lib_pmgrpcd.OPTIONS.rawdatafile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    try:
        telemetry_msg = huawei_telemetry_pb2.Telemetry()
        telemetry_msg.ParseFromString(new_msg.data)
    except Exception as e:
        PMGRPCDLOG.error(
            "instancing or parsing data failed with huawei_telemetry_pb2.Telemetry")
        PMGRPCDLOG.error("ERROR: %s" % (e))
        raise

    try:
        telemetry_msg_dict = MessageToDict(
            telemetry_msg,
            including_default_value_fields=True,
            preserving_proto_field_name=True,
            use_integers_for_enums=True,
        )
    except Exception as e:
        PMGRPCDLOG.error(
            "converting the Telemetry message to a dict failed")
        raise

    PMGRPCDLOG.debug("Huawei: Received GPB-Data as JSON")
    # TODO: Do we really need this? it can be expensive
    PMGRPCDLOG.debug(json.dumps(telemetry_msg_dict, indent=2, sort_keys=True))

    message_header_dict = telemetry_msg_dict.copy()
    if "data_gpb" in message_header_dict:
        del message_header_dict["data_gpb"]

    (proto, path) = message_header_dict["sensor_path"].split(":")
    node_id_str = message_header_dict["node_id_str"]
    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]

    # Get the matching L3 method (the inner GPB decoder for this proto).
    msg = select_gbp_methode(proto)
    if msg:
        elem = len(telemetry_msg.data_gpb.row)
        epochmillis = int(round(time.time() * 1000))
        PMGRPCDLOG.info(
            "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM:%s"
            % (epochmillis, node_ip, node_id_str, ne_vendor, proto, "GPB", elem))

        # L2: iterate over the rows of the outer telemetry message.
        for new_row in telemetry_msg.data_gpb.row:
            # PMGRPCDLOG.info("NEW_ROW: %s" % (new_row))
            new_row_header_dict = MessageToDict(
                new_row,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )
            if "content" in new_row_header_dict:
                del new_row_header_dict["content"]

            # L3: decode the row content with the proto-specific message.
            msg.ParseFromString(new_row.content)
            content = MessageToDict(
                msg,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )

            message_dict = {}
            message_dict.update({
                "collector": {
                    "grpc": {
                        "grpcPeer": grpcPeer["telemetry_node"],
                        "ne_vendor": grpcPeer["ne_vendor"],
                    }
                }
            })
            message_dict["collector"].update(
                {"data": message_header_dict.copy()})
            message_dict["collector"]["data"].update(new_row_header_dict)
            message_dict.update(content)

            allkeys = parse_dict(content, ret="", level=0)
            PMGRPCDLOG.debug("Huawei: %s: %s" % (proto, allkeys))

            try:
                returned = FinalizeTelemetryData(message_dict)
            except Exception as e:
                PMGRPCDLOG.error("Error finalizing message: %s", e)
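# Shape of the message_dict handed to FinalizeTelemetryData (illustrative
# values; the decoded row content is merged in at the top level):
#
#   {
#     "collector": {
#       "grpc": {"grpcPeer": "10.215.133.23", "ne_vendor": "Huawei"},
#       "data": {"sensor_path": "huawei-ifm:...", "node_id_str": "...", ...}
#     },
#     ...top-level fields of the decoded row content...
#   }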
def __init__(self):
    PMGRPCDLOG.info("Huawei: Initializing gRPCDataserviceServicer()")
def FinalizeTelemetryData(dictTelemetryData):
    # Add an epoch timestamp in milliseconds to identify this single metric
    # on its way to the storage.
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Run the data through the mitigation library, if enabled.
    # TODO: Simplify the next part
    dictTelemetryData_beforeencoding = None
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data

        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            dictTelemetryData_beforeencoding = dictTelemetryData_mod
            jsonTelemetryData = json.dumps(
                dictTelemetryData_mod, indent=2, sort_keys=True)
        except Exception as e:
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised an error:")
            PMGRPCDLOG.info("ERROR: %s", e)
            dictTelemetryData_mod = dictTelemetryData
            dictTelemetryData_beforeencoding = dictTelemetryData
            jsonTelemetryData = json.dumps(
                dictTelemetryData, indent=2, sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        dictTelemetryData_beforeencoding = dictTelemetryData
        jsonTelemetryData = json.dumps(
            dictTelemetryData, indent=2, sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s" % (jsonTelemetryData))

    # Check if we need to transform. This will change later.
    #breakpoint() if get_lock() else None
    path = dictTelemetryData_beforeencoding["collector"]["data"]["path"]
    actual_data = dictTelemetryData_beforeencoding.get(path, {})
    #if path == "sys/intf":
    #    return
    PMGRPCDLOG.debug(path)
    #breakpoint() if get_lock() else None
    if (TRANSFORMATION and dictTelemetryData_beforeencoding
            and "dataGpbkv" in dictTelemetryData_beforeencoding.get(
                "collector", {}).get("data", {})):
        data = dictTelemetryData_beforeencoding["collector"]["data"].copy()
        # We only transform kv data.
        data["dataGpbkv"] = [{"fields": actual_data}]
        metric = CiscoKVFlatten.build_from_dcit(data)
        internals = list(metric.get_internal())
        #breakpoint() if get_lock() else None
        for internal in internals:
            for new_metric in TRANSFORMATION.transform(internal):
                PMGRPCDLOG.debug(new_metric.keys)
                data = new_metric.data
                data["dataGpbkv"] = new_metric.content
                export_metrics(json.dumps({"collector": {"data": data}}))
        #breakpoint() if get_lock() else None
        return jsonTelemetryData
    #breakpoint() if get_lock() else None

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s" %
                         (lib_pmgrpcd.OPTIONS.jsondatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile, "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Filter: optionally export only openconfig encoding paths.
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig" in
                    dictTelemetryData_mod["collector"]["data"]["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData
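# Note on the transformation hooks above (inferred from usage here, hedged;
# the real classes live elsewhere in this codebase):
# CiscoKVFlatten.build_from_dcit(data) builds a metric whose get_internal()
# yields flattened internal records, and TRANSFORMATION.transform(internal)
# yields objects exposing .keys, .data and .content.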
def cisco_processing(grpcPeer, new_msg):
    messages = {}
    grpc_message = {}
    encoding_type = None
    PMGRPCDLOG.debug("Cisco: Received GRPC-Data")
    PMGRPCDLOG.debug(new_msg.data)

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatadumpfile:
        PMGRPCDLOG.debug("Write rawdatadumpfile: %s" %
                         (lib_pmgrpcd.OPTIONS.rawdatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.rawdatadumpfile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    # Find the encoding of the packet
    try:
        encoding_type, grpc_message = find_encoding_and_decode(new_msg)
    except Exception as e:
        PMGRPCDLOG.error("Error decoding packet. Error is {}".format(e))

    PMGRPCDLOG.debug("encoding_type is: %s\n" % (encoding_type))

    if (encoding_type == "unknown") or encoding_type is None:
        PMGRPCDLOG.error("encoding_type is unknown.")
        raise Exception("Encoding type unknown")

    message_header_dict = grpc_message.copy()
    if "data_json" in message_header_dict:
        del message_header_dict["data_json"]

    PMGRPCDLOG.debug("Header:%s", message_header_dict)

    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]
    epochmillis = int(round(time.time() * 1000))

    if encoding_type == "ciscojson":
        message_header_dict.update({"encoding_type": encoding_type})
        (proto, path) = message_header_dict["encoding_path"].split(":")
        node_id_str = message_header_dict["node_id_str"]
        elem = len(grpc_message["data_json"])
        messages = grpc_message["data_json"]
    elif encoding_type == "ciscogrpckv":
        message_header_dict.update({"encoding_type": encoding_type})
        # Normalize the camelCase keys of the KV decoding to snake_case.
        message_header_dict["encoding_path"] = message_header_dict.pop(
            "encodingPath")
        message_header_dict["node_id_str"] = message_header_dict.pop("nodeIdStr")
        message_header_dict["msg_timestamp"] = message_header_dict.pop(
            "msgTimestamp")
        message_header_dict["subscription_id_str"] = message_header_dict.pop(
            "subscriptionIdStr")

        full_encoding_path = message_header_dict["encoding_path"]
        if ":" in full_encoding_path:
            (proto, path) = full_encoding_path.split(":")
        else:
            proto = None
            path = full_encoding_path
        node_id_str = message_header_dict["node_id_str"]
        if "dataGpbkv" in grpc_message:
            elem = len(grpc_message["dataGpbkv"])
            messages = grpc_message["dataGpbkv"]
        else:
            elem = 0
            messages = {}

    message_header_dict["path"] = path

    PMGRPCDLOG.info(
        "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM=%s",
        epochmillis, node_ip, node_id_str, ne_vendor, proto, encoding_type, elem,
    )

    # A single telemetry packet can contain multiple msgs (each with their
    # own keys/values). Here we process them one by one.
    for listelem in messages:
        # Copy the necessary metadata to the packet.
        PMGRPCDLOG.debug("LISTELEM: %s", listelem)

        message_dict = {}
        message_dict.update({"collector": {"grpc": {}}})
        message_dict["collector"]["grpc"].update(
            {"grpcPeer": grpcPeer["telemetry_node"]})
        message_dict["collector"]["grpc"].update(
            {"ne_vendor": grpcPeer["ne_vendor"]})
        message_dict["collector"].update({"data": message_header_dict})
        if encoding_type == "ciscojson":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["content"])
            message_dict.update({path: listelem["content"]})
        elif encoding_type == "ciscogrpckv":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["fields"])
            message_dict.update({path: listelem["fields"]})

        # allkeys = parse_dict(listelem, ret='', level=0)
        # PMGRPCDLOG.info("Cisco: %s: %s" % (proto, allkeys))

        try:
            returned = FinalizeTelemetryData(message_dict)
        except Exception as e:
            PMGRPCDLOG.error("Error finalizing message: %s", e)
def __init__(self):
    PMGRPCDLOG.info("Cisco: Initializing gRPCMdtDialoutServicer()")
def find_encoding_and_decode(new_msg):
    encoding_type = None
    grpc_message = {}

    # TODO: If the options force one type, only try that one.
    if lib_pmgrpcd.OPTIONS.cenctype == 'json':
        # Maybe it is json
        PMGRPCDLOG.debug("Try to parse json")
        try:
            grpc_message = json.loads(new_msg.data)
            encoding_type = "ciscojson"
        except Exception as e:
            PMGRPCDLOG.debug(
                "ERROR: Direct json parsing of grpc_message failed with message:\n%s\n",
                e)
        else:
            return encoding_type, grpc_message
    elif lib_pmgrpcd.OPTIONS.cenctype == 'gpbkv':
        PMGRPCDLOG.debug("Try to unmarshall KV")
        if encoding_type is None:
            try:
                grpc_message = process_cisco_kv(new_msg)
                encoding_type = "ciscogrpckv"
            except Exception as e:
                PMGRPCDLOG.debug(
                    "ERROR: Parsing of json after unmarshall KV failed with message:\n%s\n",
                    e)
            else:
                return encoding_type, grpc_message
    elif lib_pmgrpcd.OPTIONS.cenctype == 'gpbcomp':
        PMGRPCDLOG.debug("Try to unmarshall compact mode")
        PMGRPCDLOG.debug("TODO")

    encoding_type = "unknown"
    return encoding_type, grpc_message
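# Illustrative calling contract (hedged): cisco_processing() expects one of
# ("ciscojson", "ciscogrpckv", "unknown") plus the decoded dict:
#
#   encoding_type, grpc_message = find_encoding_and_decode(new_msg)
#   if encoding_type == "unknown":
#       ...  # the packet is rejected with an exception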