def __init__(self, channel):
    """Open a gNMI Subscribe stream over *channel* and process updates forever.

    Builds a single subscription for /interfaces (60 s sample interval,
    PROTO encoding), then blocks iterating the response stream, turning
    each update notification into a collector message dict that is handed
    to FinalizeTelemetryData.

    :param channel: an already-created grpc channel to the gNMI target.
    """
    self.channel = channel
    self.stub = gnmi_pb2_grpc.gNMIStub(self.channel)

    # ask for the capabilites
    # cap_req = gnmi_pb2.CapabilityRequest()
    # cap_res = self.stub.Capabilities(cap_req)
    self.encapsulation = gnmi_pb2.PROTO

    # Subscribe to /interfaces, sampled every 60 seconds (interval is in ns).
    encoding_path = "/interfaces"
    path = gnmi_utils.simple_gnmi_string_parser(encoding_path)
    subscription = gnmi_pb2.Subscription(path=path, sample_interval=60 * 1000000000)
    subscription_list = gnmi_pb2.SubscriptionList(
        prefix=None, encoding=self.encapsulation, subscription=[subscription]
    )
    subscribe_request = gnmi_pb2.SubscribeRequest(subscribe=subscription_list)

    def request_iterator():
        # Subscribe() expects an iterator of SubscribeRequest messages.
        yield subscribe_request

    base_grpc = {
        # NOTE(review): reaches into the private _channel attribute to read
        # the peer target; there is no public accessor for this.
        "grpcPeer": self.channel._channel.target().decode(),
        "ne_vendor": "gnmi",
    }

    msgs = self.stub.Subscribe(request_iterator(), None)
    for msg in msgs:
        if not msg.HasField("update"):
            continue
        grpc = dict(base_grpc)
        # TODO: node id is hard-coded; it should come from the target/config.
        data = {"node_id_str": "r33.labxtx01.us.bb"}
        notification = msg.update
        timestamp = notification.timestamp  # in nanoseconds since epoch
        prefix = notification.prefix
        sensor_path, keys = gnmi_utils.gnmi_to_string_and_keys(prefix)
        data["encoding_path"] = sensor_path
        data["collection_timestamp"] = timestamp / 1000
        data["keys"] = keys
        gnmi = []
        for upd in notification.update:
            upd_name, extra_keys = gnmi_utils.gnmi_to_string_and_keys(upd.path)
            which = upd.val.WhichOneof("value")
            try:
                value = getattr(upd.val, which)
            except TypeError:
                # WhichOneof returned None: the update carries no value.
                # This was a leftover breakpoint(); log and skip instead.
                PMGRPCDLOG.error("gnmi update without a value for %s", upd_name)
                continue
            # These oneof variants are not JSON friendly; stringify them.
            if which in ("leaflist_val", "any_val", "decimal_val"):
                value = str(value)
            if upd_name == "__juniper_telemetry_header__":
                # Juniper-specific header element; not part of the data itself.
                header_bytes = value
                continue
            if extra_keys:
                # Unexpected per-leaf keys (was a development breakpoint()).
                PMGRPCDLOG.warning(
                    "Unexpected extra keys in %s: %s", upd_name, extra_keys
                )
            gnmi.append({"keys": extra_keys, "name": upd_name, "value": value})
        data["gnmi"] = gnmi
        message_dict = {"collector": {"grpc": grpc, "data": data}}
        try:
            returned = FinalizeTelemetryData(message_dict)
        except Exception as e:
            PMGRPCDLOG.error("Error finalizing message: %s", e)
def select_gbp_methode(proto):
    """Return an instantiated GPB decoder message for *proto*.

    Looks *proto* up in the gpb map file. On a miss the proto is recorded
    in lib_pmgrpcd.MISSGPBLIB with the current timestamp and False is
    returned so the caller can skip decoding.
    """
    try:
        map_dict = get_gpbmapfile()
    except Exception:
        PMGRPCDLOG.error("Error getting the map dict")
        raise

    if proto in map_dict:
        # Fixed: arguments were swapped, logging the proto as the GPB name.
        PMGRPCDLOG.debug(
            "I FOUND THE GPB (%s) FOR PROTO (%s)", map_dict[proto], proto
        )
        # TODO: I am pretty sure we can do something better than this.
        # SECURITY: eval() executes arbitrary code from the map file; the
        # file must be trusted. Consider a registry dict of constructors.
        msg = eval(map_dict[proto])
        return msg
    else:
        PMGRPCDLOG.debug("MISSING GPB Methode for PROTO: %s", proto)
        lib_pmgrpcd.MISSGPBLIB.update({proto: str(datetime.now())})
        return False
def huawei_processing(grpcPeer, new_msg):
    """Decode a Huawei telemetry GRPC message and forward each data row.

    Parses the outer huawei_telemetry_pb2.Telemetry envelope, selects the
    row decoder matching the sensor proto, then emits one collector
    message per data_gpb row via FinalizeTelemetryData.

    :param grpcPeer: dict with at least "telemetry_node" and "ne_vendor".
    :param new_msg: the raw GRPC message carrying serialized telemetry data.
    """
    PMGRPCDLOG.debug("Huawei: Received GRPC-Data")

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatafile:
        PMGRPCDLOG.debug("Write rawdatafile: %s" % (lib_pmgrpcd.OPTIONS.rawdatafile))
        with open(lib_pmgrpcd.OPTIONS.rawdatafile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    try:
        telemetry_msg = huawei_telemetry_pb2.Telemetry()
        telemetry_msg.ParseFromString(new_msg.data)
    except Exception as e:
        PMGRPCDLOG.error(
            "instancing or parsing data failed with huawei_telemetry_pb2.Telemetry"
        )
        PMGRPCDLOG.error("ERROR: %s" % (e))
        raise

    try:
        telemetry_msg_dict = MessageToDict(
            telemetry_msg,
            including_default_value_fields=True,
            preserving_proto_field_name=True,
            use_integers_for_enums=True,
        )
    except Exception as e:
        # Fixed: this used to repeat the parse-failure message verbatim and
        # drop the exception details.
        PMGRPCDLOG.error(
            "converting huawei_telemetry_pb2.Telemetry to dict failed: %s", e
        )
        raise

    PMGRPCDLOG.debug("Huawei: Received GPB-Data as JSON")
    # TODO: Do we really need this? it can be expensive
    PMGRPCDLOG.debug(json.dumps(telemetry_msg_dict, indent=2, sort_keys=True))

    # The header is everything except the payload rows.
    message_header_dict = telemetry_msg_dict.copy()
    if "data_gpb" in message_header_dict:
        del message_header_dict["data_gpb"]

    (proto, path) = message_header_dict["sensor_path"].split(":")
    node_id_str = message_header_dict["node_id_str"]
    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]

    # Get the matching GPB decoder for this proto (False when unknown).
    msg = select_gbp_methode(proto)
    if msg:
        elem = len(telemetry_msg.data_gpb.row)
        epochmillis = int(round(time.time() * 1000))
        PMGRPCDLOG.info(
            "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM:%s"
            % (epochmillis, node_ip, node_id_str, ne_vendor, proto, "GPB", elem)
        )

        for new_row in telemetry_msg.data_gpb.row:
            # Row metadata without its serialized payload.
            new_row_header_dict = MessageToDict(
                new_row,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )
            if "content" in new_row_header_dict:
                del new_row_header_dict["content"]

            # Decode the row payload with the proto-specific message type.
            msg.ParseFromString(new_row.content)
            content = MessageToDict(
                msg,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )

            message_dict = {}
            message_dict.update(
                {
                    "collector": {
                        "grpc": {
                            "grpcPeer": grpcPeer["telemetry_node"],
                            "ne_vendor": grpcPeer["ne_vendor"],
                        }
                    }
                }
            )
            message_dict["collector"].update({"data": message_header_dict.copy()})
            message_dict["collector"]["data"].update(new_row_header_dict)
            message_dict.update(content)

            allkeys = parse_dict(content, ret="", level=0)
            PMGRPCDLOG.debug("Huawei: %s: %s" % (proto, allkeys))

            try:
                returned = FinalizeTelemetryData(message_dict)
            except Exception as e:
                PMGRPCDLOG.error("Error finalizing message: %s", e)
def cisco_processing(grpcPeer, new_msg):
    """Decode a Cisco telemetry packet (JSON or GPB-KV) and forward each element.

    Detects the packet encoding, normalizes the header fields, then emits
    one collector message per telemetry element via FinalizeTelemetryData.

    :param grpcPeer: dict with at least "telemetry_node" and "ne_vendor".
    :param new_msg: the raw GRPC message carrying serialized telemetry data.
    :raises Exception: when the packet encoding cannot be determined.
    """
    messages = {}
    grpc_message = {}
    encoding_type = None
    PMGRPCDLOG.debug("Cisco: Received GRPC-Data")
    PMGRPCDLOG.debug(new_msg.data)

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatadumpfile:
        # Fixed: this debug line used to log the unrelated rawdatafile option.
        PMGRPCDLOG.debug(
            "Write rawdatadumpfile: %s" % (lib_pmgrpcd.OPTIONS.rawdatadumpfile)
        )
        with open(lib_pmgrpcd.OPTIONS.rawdatadumpfile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    # Find the encoding of the packet
    try:
        encoding_type, grpc_message = find_encoding_and_decode(new_msg)
    except Exception as e:
        PMGRPCDLOG.error("Error decoding packet. Error is {}".format(e))

    PMGRPCDLOG.debug("encoding_type is: %s\n" % (encoding_type))

    # Merged the duplicated unknown-encoding checks; use the logger
    # instead of a stray print().
    if (encoding_type == "unknown") or encoding_type is None:
        PMGRPCDLOG.error("encoding_type is unknown.")
        raise Exception("Encoding type unknown")

    message_header_dict = grpc_message.copy()
    if "data_json" in message_header_dict:
        del message_header_dict["data_json"]

    PMGRPCDLOG.debug("Header:%s", message_header_dict)

    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]
    epochmillis = int(round(time.time() * 1000))

    if encoding_type == "ciscojson":
        message_header_dict.update({"encoding_type": encoding_type})
        (proto, path) = message_header_dict["encoding_path"].split(":")
        node_id_str = message_header_dict["node_id_str"]
        elem = len(grpc_message["data_json"])
        messages = grpc_message["data_json"]
    elif encoding_type == "ciscogrpckv":
        message_header_dict.update({"encoding_type": encoding_type})
        # Normalize camelCase keys from the GPB-KV header to snake_case.
        message_header_dict["encoding_path"] = message_header_dict.pop("encodingPath")
        message_header_dict["node_id_str"] = message_header_dict.pop("nodeIdStr")
        message_header_dict["msg_timestamp"] = message_header_dict.pop("msgTimestamp")
        message_header_dict["subscription_id_str"] = message_header_dict.pop(
            "subscriptionIdStr"
        )

        full_encoding_path = message_header_dict["encoding_path"]
        if ":" in full_encoding_path:
            (proto, path) = message_header_dict["encoding_path"].split(":")
        else:
            proto = None
            path = full_encoding_path
        node_id_str = message_header_dict["node_id_str"]
        if "dataGpbkv" in grpc_message:
            elem = len(grpc_message["dataGpbkv"])
            messages = grpc_message["dataGpbkv"]
        else:
            elem = 0
            messages = {}

    message_header_dict["path"] = path

    PMGRPCDLOG.info(
        "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM=%s",
        epochmillis,
        node_ip,
        node_id_str,
        ne_vendor,
        proto,
        encoding_type,
        elem,
    )

    # A single telemetry packet can contain multiple msgs (each having their
    # own key/values). Here we are processing them one by one.
    for listelem in messages:
        # Copy the necessary metadata to the packet.
        PMGRPCDLOG.debug("LISTELEM: %s", listelem)

        message_dict = {}
        message_dict.update({"collector": {"grpc": {}}})
        message_dict["collector"]["grpc"].update(
            {"grpcPeer": grpcPeer["telemetry_node"]}
        )
        message_dict["collector"]["grpc"].update({"ne_vendor": grpcPeer["ne_vendor"]})
        message_dict["collector"].update({"data": message_header_dict})
        if encoding_type == "ciscojson":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["content"])
            message_dict.update({path: listelem["content"]})
        elif encoding_type == "ciscogrpckv":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["fields"])
            message_dict.update({path: listelem["fields"]})

        try:
            returned = FinalizeTelemetryData(message_dict)
        except Exception as e:
            PMGRPCDLOG.error("Error finalizing message: %s", e)
def main():
    """Entry point: load config, parse options, set up logging and dispatch.

    Depending on the options, either serializes data manually, imports from
    a file, starts the gNMI client, or runs the normal GRPC collector.
    """
    global CONFIGFILE
    usage_str = "%prog [options]"
    version_str = "%prog " + SCRIPTVERSION

    # We go over arguments very simply and obtaining the config file, if
    # this one is available, before the full option parse.
    config_file_flag = "-c"
    extra_argv = sys.argv[1:]
    config_file_args = None
    if extra_argv:
        if config_file_flag in extra_argv:
            index = extra_argv.index(config_file_flag)
            file_index = index + 1
            try:
                config_file_args = extra_argv[file_index]
            except IndexError:
                # "-c" was the last argument; keep the default config file.
                pass

    if config_file_args is not None:
        CONFIGFILE = config_file_args

    # Load config. And make sure other files exists.
    config = configparser.ConfigParser()
    if os.path.isfile(CONFIGFILE):
        config.read(CONFIGFILE)
        if "PMGRPCD" not in config.sections():
            raise FileNotFound("There is no PMGRPCD on configuration file")
    else:
        raise FileNotFound(
            "We could not find configuration file in {}".format(CONFIGFILE)
        )

    # Parse arguments. Default must be a named argument!
    parser = OptionParserEnv(usage=usage_str, version=version_str)
    # the next one is not really used, but important to avoid errors.
    parser.add_option(
        config_file_flag,
        default=str(DEFAULT_CONFIGFILE),
        dest="configuration",
        help="Path to configuration file",
    )

    # gnmi options
    parser.add_option(
        "-g",
        "--gnmi_enable",
        default=config.getboolean("PMGRPCD", "gnmi_enable", fallback=False),
        help="Boolean defining whether gnmi is enable (this disables the rest of collectrors)",
    )
    parser.add_option(
        "--gnmi_target",
        env_name="GNMMI_SERVER",
        default=config.get("PMGRPCD", "gnmi_target", fallback=None),
        help="The url of the gnmi target",
    )
    parser.add_option(
        "-T",
        "--topic",
        env_name="PM_TOPIC",
        default=config.get("PMGRPCD", "topic", fallback=None),
        dest="topic",
        help="the json data are serialized to this topic",
    )
    parser.add_option(
        "-B",
        "--bsservers",
        default=config.get("PMGRPCD", "bsservers", fallback=None),
        env_name="BSSERVERS",
        dest="bsservers",
        help="bootstrap servers url with port to reach kafka",
    )
    parser.add_option(
        "-S",
        "--secproto",
        default=config.get("PMGRPCD", "secproto", fallback="ssl"),
        dest="secproto",
        help="security protocol (is normaly ssl)",
    )
    parser.add_option(
        "-O",
        "--sslcertloc",
        env_name="SSLCERTLOC",
        default=config.get("PMGRPCD", "sslcertloc", fallback=None),
        dest="sslcertloc",
        help="path/file to ssl certification location",
    )
    parser.add_option(
        "-K",
        "--sslkeyloc",
        env_name="SSLKEYLOC",
        default=config.get("PMGRPCD", "sslkeyloc", fallback=None),
        dest="sslkeyloc",
        help="path/file to ssl key location",
    )
    parser.add_option(
        "-U",
        "--urlscreg",
        env_name="URLSCREG",
        default=config.get("PMGRPCD", "urlscreg", fallback=None),
        dest="urlscreg",
        help="the url to the schema-registry",
    )
    parser.add_option(
        "-L",
        "--calocation",
        env_name="CALOCATION",
        default=config.get("PMGRPCD", "calocation", fallback=None),
        dest="calocation",
        help="the ca_location used to connect to schema-registry",
    )
    parser.add_option(
        "-G",
        "--gpbmapfile",
        env_name="GPBMAPFILE",
        default=config.get("PMGRPCD", "gpbmapfile", fallback=None),
        dest="gpbmapfile",
        help="change path/name of gpbmapfile [default: %default]",
    )
    parser.add_option(
        "-M",
        "--avscmapfile",
        env_name="AVSCMALFILE",
        default=config.get("PMGRPCD", "avscmapfile", fallback=None),
        dest="avscmapfile",
        help="path/name to the avscmapfile",
    )
    parser.add_option(
        "-m",
        "--mitigation",
        action="store_true",
        default=config.getboolean("PMGRPCD", "mitigation"),
        dest="mitigation",
        help="enable plugin mitigation mod_result_dict from python module mitigation.py",
    )
    parser.add_option(
        "-d",
        "--debug",
        action="store_true",
        default=config.getboolean("PMGRPCD", "debug"),
        dest="debug",
        help="enable debug messages on the logfile",
    )
    parser.add_option(
        "-l",
        "--PMGRPCDLOGfile",
        default=config.get("PMGRPCD", "PMGRPCDLOGfile"),
        dest="PMGRPCDLOGfile",
        help="PMGRPCDLOGfile the logfile on the collector face with path/name [default: %default]",
    )
    parser.add_option(
        "-a",
        "--serializelogfile",
        default=config.get("PMGRPCD", "serializelogfile"),
        dest="serializelogfile",
        help="serializelogfile with path/name for kafka avro and zmq messages [default: %default]",
    )
    parser.add_option(
        "-I",
        "--ipport",
        action="store",
        type="string",
        default=config.get("PMGRPCD", "ipport"),
        dest="ipport",
        help="change the ipport the daemon is listen on [default: %default]",
    )
    parser.add_option(
        "-w",
        "--workers",
        action="store",
        type="int",
        default=config.get("PMGRPCD", "workers"),
        dest="workers",
        help="change the nr of paralell working processes [default: %default]",
    )
    parser.add_option(
        "-C",
        "--cisco",
        action="store_true",
        default=config.getboolean("PMGRPCD", "cisco"),
        dest="cisco",
        help="enable the grpc messages comming from Cisco [default: %default]",
    )
    parser.add_option(
        "-H",
        "--huawei",
        action="store_true",
        default=config.getboolean("PMGRPCD", "huawei"),
        dest="huawei",
        help="enable the grpc messages comming from Huawei [default: %default]",
    )
    parser.add_option(
        "-t",
        "--cenctype",
        action="store",
        type="string",
        default=config.get("PMGRPCD", "cenctype"),
        dest="cenctype",
        help="cenctype is the type of encoding for cisco. This is because some protofiles are incompatible. With cenctype=gpbkv only cisco is enabled. The encoding type can be json, gpbcomp, gpbkv [default: %default]",
    )
    parser.add_option(
        "-e",
        "--example",
        action="store_true",
        default=config.getboolean("PMGRPCD", "example"),
        dest="example",
        help="Enable writing Example Json-Data-Files [default: %default]",
    )
    parser.add_option(
        "-E",
        "--examplepath",
        default=config.get("PMGRPCD", "examplepath"),
        dest="examplepath",
        help="dump a json example of each proto/path to this examplepath",
    )
    parser.add_option(
        "-j",
        "--jsondatadumpfile",
        dest="jsondatadumpfile",
        help="writing the output to the jsondatadumpfile path/name",
    )
    parser.add_option(
        "-r",
        "--rawdatadumpfile",
        default=config.get("PMGRPCD", "rawdatadumpfile", fallback=None),
        dest="rawdatadumpfile",
        help="writing the raw data from the routers to the rowdatafile path/name",
    )
    parser.add_option(
        "-z",
        "--zmq",
        action="store_true",
        default=config.getboolean("PMGRPCD", "zmq"),
        dest="zmq",
        help="enable forwarding to ZMQ [default: %default]",
    )
    parser.add_option(
        "-p",
        "--zmqipport",
        default=config.get("PMGRPCD", "zmqipport"),
        dest="zmqipport",
        help="define proto://ip:port of zmq socket bind [default: %default]",
    )
    parser.add_option(
        "-k",
        "--kafkaavro",
        action="store_true",
        default=config.getboolean("PMGRPCD", "kafkaavro"),
        dest="kafkaavro",
        help="enable forwarding to Kafka kafkaavro (with schema-registry) [default: %default]",
    )
    parser.add_option(
        "-o",
        "--onlyopenconfig",
        action="store_true",
        default=config.getboolean("PMGRPCD", "onlyopenconfig"),
        dest="onlyopenconfig",
        help="only accept pakets of openconfig",
    )
    parser.add_option(
        "-i", "--ip", dest="ip", help="only accept pakets of this single ip"
    )
    parser.add_option(
        "-A",
        "--avscid",
        dest="avscid",
        help="this is to serialize manually with avscid and jsondatafile (for development)",
    )
    parser.add_option(
        "-J",
        "--jsondatafile",
        dest="jsondatafile",
        help="this is to serialize manually with avscid and jsondatafile (for development)",
    )
    parser.add_option(
        "-R",
        "--rawdatafile",
        dest="rawdatafile",
        help="this is to process manually (via mitigation) process a rawdatafile with a single rawrecord (for development)",
    )
    parser.add_option(
        "-N",
        "--console",
        action="store_true",
        dest="console",
        help="this is to display all log-messages also on console (for development)",
    )
    parser.add_option(
        "-v", action="store_true", dest="version", help="print version of this script"
    )
    parser.add_option(
        "-s",
        "--kafkasimple",
        default=config.getboolean("PMGRPCD", "kafkasimple", fallback=False),
        dest="kafkasimple",
        help="Boolean if kafkasimple should be enabled.",
    )
    parser.add_option(
        "--file_exporter_file",
        default=config.get("PMGRPCD", "file_exporter_file", fallback=None),
        dest="file_exporter_file",
        help="Name of file for file exporter.",
    )
    parser.add_option(
        "--file_importer_file",
        default=config.get("PMGRPCD", "file_importer_file", fallback=None),
        dest="file_importer_file",
        help="Name of the file to import. If set, we will ignore the rest of the importers.",
    )

    (lib_pmgrpcd.OPTIONS, args) = parser.parse_args()

    init_pmgrpcdlog()
    init_serializelog()

    if lib_pmgrpcd.OPTIONS.version:
        print(parser.get_version())
        raise SystemExit

    PMGRPCDLOG.info("Using %s as config file", CONFIGFILE)
    PMGRPCDLOG.info("startoptions of this script: %s", str(lib_pmgrpcd.OPTIONS))

    configure()

    PMGRPCDLOG.info("enable listening to SIGNAL USR1 with Sinalhandler")
    signal.signal(signal.SIGUSR1, signalhandler)
    PMGRPCDLOG.info("enable listening to SIGNAL USR2 with Sinalhandler")
    signal.signal(signal.SIGUSR2, signalhandler)

    # I am going to comment the manually export of data from now, this could
    # go into other script.
    if lib_pmgrpcd.OPTIONS.avscid and lib_pmgrpcd.OPTIONS.jsondatafile:
        manually_serialize()
    elif lib_pmgrpcd.OPTIONS.file_importer_file:
        file_importer = FileInput(lib_pmgrpcd.OPTIONS.file_importer_file)
        PMGRPCDLOG.info("Starting file import")
        file_importer.generate()
        PMGRPCDLOG.info("No more data, sleeping 3 secs")
        time.sleep(3)
        PMGRPCDLOG.info("Finalizing file import")
    elif lib_pmgrpcd.OPTIONS.avscid or lib_pmgrpcd.OPTIONS.jsondatafile:
        PMGRPCDLOG.info(
            "manually serialize need both lib_pmgrpcd.OPTIONS avscid and jsondatafile"
        )
        parser.print_help()
    elif lib_pmgrpcd.OPTIONS.gnmi_enable:
        if lib_pmgrpcd.OPTIONS.gnmi_target is None:
            error = "gnmi target not configured, but gnmi enabled"
            PMGRPCDLOG.error(error)
            raise Exception(error)
        PMGRPCDLOG.info(
            "Starting contact with gnmi server %s. Other functions will be ignored",
            lib_pmgrpcd.OPTIONS.gnmi_target,
        )
        channel = grpc.insecure_channel(lib_pmgrpcd.OPTIONS.gnmi_target)
        # GNMIClient consumes the subscribe stream in its constructor, so
        # this call blocks. (A leftover breakpoint() here was removed.)
        gnmi_client = GNMIClient(channel)
    else:
        # make sure some important files exist
        if not os.path.isfile(lib_pmgrpcd.OPTIONS.gpbmapfile):
            raise FileNotFound(
                "No gpbmapfile file found in {}".format(lib_pmgrpcd.OPTIONS.gpbmapfile)
            )
        # TODO: Do we really need this always?
        if not os.path.isfile(lib_pmgrpcd.OPTIONS.avscmapfile):
            raise FileNotFound(
                "No avscmapfile file found in {}".format(
                    lib_pmgrpcd.OPTIONS.avscmapfile
                )
            )
        PMGRPCDLOG.info("pmgrpsd.py is started at %s", str(datetime.now()))
        serve()