Example #1
def FinalizeTelemetryData(dictTelemetryData):

    # Add an epoch timestamp in milliseconds to identify this single metric on its way to storage
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Run the data through the mitigation library, if enabled.
    # TODO: Simplify the next part
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data
        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            jsonTelemetryData = json.dumps(dictTelemetryData_mod,
                                           indent=2,
                                           sort_keys=True)
        except Exception as e:
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised a error:\n%s")
            PMGRPCDLOG.info("ERROR: %s" % (e))
            dictTelemetryData_mod = dictTelemetryData
            jsonTelemetryData = json.dumps(dictTelemetryData,
                                           indent=2,
                                           sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        jsonTelemetryData = json.dumps(dictTelemetryData,
                                       indent=2,
                                       sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s" % (jsonTelemetryData))

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s" %
                         (lib_pmgrpcd.OPTIONS.jsondatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile,
                  "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Optionally filter so that only OpenConfig paths are exported.
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig" in dictTelemetryData_mod["collector"]["data"]
                ["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData
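
A minimal invocation sketch (an assumption, not part of the original source): it presumes lib_pmgrpcd.OPTIONS has already been populated by main() and uses the nested collector/data layout built by the Cisco and Huawei handlers below; all field values are hypothetical.

# Hypothetical input: only the "collector"/"data" nesting is required.
# FinalizeTelemetryData() adds "collection_timestamp" under "data" and
# returns the JSON string it exported.
sample = {
    "collector": {
        "grpc": {"grpcPeer": "192.0.2.1", "ne_vendor": "Huawei"},
        "data": {"encoding_path": "openconfig-interfaces:interfaces"},
    },
    "openconfig-interfaces:interfaces": {},
}
jsonTelemetryData = FinalizeTelemetryData(sample)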
Example #2
 def manually_serialize(self):
     PMGRPCDLOG.info(
         "manually serialize with  avscid (%s) and jsondatafile (%s)" %
         (lib_pmgrpcd.OPTIONS.avscid, lib_pmgrpcd.OPTIONS.jsondatafile))
     avscid = int(lib_pmgrpcd.OPTIONS.avscid)
     avroinstance = self.getavro_schid_instance(avscid)
     with open(lib_pmgrpcd.OPTIONS.jsondatafile, "r") as jsondatahandler:
         jsondata = json.load(jsondatahandler)
     self.serialize(jsondata, lib_pmgrpcd.OPTIONS.topic, avscid,
                    avroinstance)
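
For context, main() (Example #3) triggers manual serialization only when both -A/--avscid and -J/--jsondatafile are given. A rough sketch of that precondition with made-up values (how the method is ultimately reached from main() is not shown in these snippets):

# Hypothetical option values; manually_serialize() reads all three from lib_pmgrpcd.OPTIONS.
lib_pmgrpcd.OPTIONS.avscid = "42"                           # Avro schema id in the schema registry
lib_pmgrpcd.OPTIONS.jsondatafile = "telemetry_sample.json"  # JSON record matching that schema
lib_pmgrpcd.OPTIONS.topic = "telemetry.avro"                # Kafka topic to serialize to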
Example #3
def main():
    global CONFIGFILE
    usage_str = "%prog [options]"
    version_str = "%prog " + SCRIPTVERSION

    # Scan the arguments in a very simple way to obtain the config file, if one was given.
    config_file_flag = "-c"
    extra_argv = sys.argv[1:]
    config_file_args = None
    if extra_argv:
        if config_file_flag in extra_argv:
            index = extra_argv.index(config_file_flag)
            file_index = index + 1
            try:
                config_file_args = extra_argv[file_index]
            except IndexError:
                pass
    if config_file_args is not None:
        CONFIGFILE = config_file_args

    # Load the config and make sure the other required files exist.
    config = configparser.ConfigParser()
    if os.path.isfile(CONFIGFILE):
        config.read(CONFIGFILE)
        if "PMGRPCD" not in config.sections():
            raise FileNotFound("There is no PMGRPCD on configuration file")
    else:
        raise FileNotFound(
            "We could not find a configuration file at {}".format(CONFIGFILE))

    # Parse arguments. Default must be a named argument!
    parser = OptionParserEnv(usage=usage_str, version=version_str)
    # The next option is not really used, but it must be defined to avoid parsing errors.
    parser.add_option(
        config_file_flag,
        default=str(DEFAULT_CONFIGFILE),
        dest="configuration",
        help="Path to configuration file",
    )
    #gnmi options
    #parser.add_option(
    #    "-g",
    #    "--gnmi_enable",
    #    default=config.getboolean("PMGRPCD", "gnmi_enable", fallback=False),
    #    help="Boolean defining whether gnmi is enable (this disables the rest of collectrors)",
    #)
    #parser.add_option(
    #    "--gnmi_target",
    #    env_name = "GNMI_SERVER",
    #    default=config.get("PMGRPCD", "gnmi_target", fallback=None),
    #    help="The url of the gnmi target",
    #)

    parser.add_option(
        "-T",
        "--topic",
        env_name="PM_TOPIC",
        default=config.get("PMGRPCD", "topic", fallback=None),
        dest="topic",
        help="the json data are serialized to this topic",
    )
    parser.add_option(
        "-B",
        "--bsservers",
        default=config.get("PMGRPCD", "bsservers", fallback=None),
        env_name="BSSERVERS",
        dest="bsservers",
        help="bootstrap servers url with port to reach kafka",
    )
    parser.add_option(
        "-S",
        "--secproto",
        default=config.get("PMGRPCD", "secproto", fallback="ssl"),
        dest="secproto",
        help="security protocol (is normaly ssl)",
    )
    parser.add_option(
        "-O",
        "--sslcertloc",
        env_name="SSLCERTLOC",
        default=config.get("PMGRPCD", "sslcertloc", fallback=None),
        dest="sslcertloc",
        help="path/file to ssl certification location",
    )
    parser.add_option(
        "-K",
        "--sslkeyloc",
        env_name="SSLKEYLOC",
        default=config.get("PMGRPCD", "sslkeyloc", fallback=None),
        dest="sslkeyloc",
        help="path/file to ssl key location",
    )
    parser.add_option(
        "-U",
        "--urlscreg",
        env_name="URLSCREG",
        default=config.get("PMGRPCD", "urlscreg", fallback=None),
        dest="urlscreg",
        help="the url to the schema-registry",
    )
    parser.add_option(
        "-L",
        "--calocation",
        env_name="CALOCATION",
        default=config.get("PMGRPCD", "calocation", fallback=None),
        dest="calocation",
        help="the ca_location used to connect to schema-registry",
    )
    parser.add_option(
        "-G",
        "--gpbmapfile",
        env_name="GPBMAPFILE",
        default=config.get("PMGRPCD", "gpbmapfile", fallback=None),
        dest="gpbmapfile",
        help="change path/name of gpbmapfile [default: %default]",
    )
    parser.add_option(
        "-M",
        "--avscmapfile",
        env_name="AVSCMALFILE",
        default=config.get("PMGRPCD", "avscmapfile", fallback=None),
        dest="avscmapfile",
        help="path/name to the avscmapfile",
    )
    parser.add_option(
        "-m",
        "--mitigation",
        action="store_true",
        default=config.getboolean("PMGRPCD", "mitigation"),
        dest="mitigation",
        help=
        "enable plugin mitigation mod_result_dict from python module mitigation.py",
    )
    parser.add_option(
        "-d",
        "--debug",
        action="store_true",
        default=config.getboolean("PMGRPCD", "debug"),
        dest="debug",
        help="enable debug messages on the logfile",
    )
    parser.add_option(
        "-l",
        "--PMGRPCDLOGfile",
        default=config.get("PMGRPCD", "PMGRPCDLOGfile"),
        dest="PMGRPCDLOGfile",
        help=
        "PMGRPCDLOGfile the logfile on the collector face with path/name [default: %default]",
    )
    parser.add_option(
        "-a",
        "--serializelogfile",
        default=config.get("PMGRPCD", "serializelogfile"),
        dest="serializelogfile",
        help=
        "serializelogfile with path/name for kafka avro and zmq messages [default: %default]",
    )
    parser.add_option(
        "-I",
        "--ipport",
        action="store",
        type="string",
        default=config.get("PMGRPCD", "ipport"),
        dest="ipport",
        help="change the ipport the daemon is listen on [default: %default]",
    )
    parser.add_option(
        "-w",
        "--workers",
        action="store",
        type="int",
        default=config.get("PMGRPCD", "workers"),
        dest="workers",
        help="change the nr of paralell working processes [default: %default]",
    )
    parser.add_option(
        "-C",
        "--cisco",
        action="store_true",
        default=config.getboolean("PMGRPCD", "cisco"),
        dest="cisco",
        help="enable the grpc messages comming from Cisco [default: %default]",
    )
    parser.add_option(
        "-H",
        "--huawei",
        action="store_true",
        default=config.getboolean("PMGRPCD", "huawei"),
        dest="huawei",
        help="enable the grpc messages comming from Huawei [default: %default]",
    )
    parser.add_option(
        "-t",
        "--cenctype",
        action="store",
        type="string",
        default=config.get("PMGRPCD", "cenctype"),
        dest="cenctype",
        help=
        "cenctype is the type of encoding for cisco. This is because some protofiles are incompatible. With cenctype=gpbkv only cisco is enabled. The encoding type can be json, gpbcomp, gpbkv [default: %default]",
    )
    parser.add_option(
        "-e",
        "--example",
        action="store_true",
        default=config.getboolean("PMGRPCD", "example"),
        dest="example",
        help="Enable writing Example Json-Data-Files [default: %default]",
    )
    parser.add_option(
        "-E",
        "--examplepath",
        default=config.get("PMGRPCD", "examplepath"),
        dest="examplepath",
        help="dump a json example of each proto/path to this examplepath",
    )
    parser.add_option(
        "-j",
        "--jsondatadumpfile",
        dest="jsondatadumpfile",
        help="writing the output to the jsondatadumpfile path/name",
    )
    parser.add_option(
        "-r",
        "--rawdatadumpfile",
        default=config.get("PMGRPCD", "rawdatadumpfile", fallback=None),
        dest="rawdatadumpfile",
        help=
        "writing the raw data from the routers to the rowdatafile path/name",
    )
    parser.add_option(
        "-z",
        "--zmq",
        action="store_true",
        default=config.getboolean("PMGRPCD", "zmq"),
        dest="zmq",
        help="enable forwarding to ZMQ [default: %default]",
    )
    parser.add_option(
        "-p",
        "--zmqipport",
        default=config.get("PMGRPCD", "zmqipport"),
        dest="zmqipport",
        help="define proto://ip:port of zmq socket bind [default: %default]",
    )
    parser.add_option(
        "-k",
        "--kafkaavro",
        action="store_true",
        default=config.getboolean("PMGRPCD", "kafkaavro"),
        dest="kafkaavro",
        help=
        "enable forwarding to Kafka kafkaavro (with schema-registry) [default: %default]",
    )
    parser.add_option(
        "-o",
        "--onlyopenconfig",
        action="store_true",
        default=config.getboolean("PMGRPCD", "onlyopenconfig"),
        dest="onlyopenconfig",
        help="only accept pakets of openconfig",
    )
    parser.add_option("-i",
                      "--ip",
                      dest="ip",
                      help="only accept pakets of this single ip")
    parser.add_option(
        "-A",
        "--avscid",
        dest="avscid",
        help=
        "this is to serialize manually with avscid and jsondatafile (for development)",
    )
    parser.add_option(
        "-J",
        "--jsondatafile",
        dest="jsondatafile",
        help=
        "this is to serialize manually with avscid and jsondatafile (for development)",
    )
    parser.add_option(
        "-R",
        "--rawdatafile",
        dest="rawdatafile",
        help=
        "this is to process manually (via mitigation) process a rawdatafile with a single rawrecord (for development)",
    )
    parser.add_option(
        "-N",
        "--console",
        action="store_true",
        dest="console",
        help=
        "this is to display all log-messages also on console (for development)",
    )
    parser.add_option("-v",
                      action="store_true",
                      dest="version",
                      help="print version of this script")
    parser.add_option(
        "-s",
        "--kafkasimple",
        default=config.getboolean("PMGRPCD", "kafkasimple", fallback=False),
        dest="kafkasimple",
        help="Boolean if kafkasimple should be enabled.",
    )

    parser.add_option(
        "--file_exporter_file",
        default=config.get("PMGRPCD", "file_exporter_file", fallback=None),
        dest="file_exporter_file",
        help="Name of file for file exporter.",
    )

    parser.add_option(
        "--file_importer_file",
        default=config.get("PMGRPCD", "file_importer_file", fallback=None),
        dest="file_importer_file",
        help=
        "Name of the file to import. If set, we will ignore the rest of the importers.",
    )

    (lib_pmgrpcd.OPTIONS, args) = parser.parse_args()

    init_pmgrpcdlog()
    init_serializelog()

    if lib_pmgrpcd.OPTIONS.version:
        print(parser.get_version())
        raise SystemExit

    PMGRPCDLOG.info("Using %s as config file", CONFIGFILE)
    PMGRPCDLOG.info("startoptions of this script: %s",
                    str(lib_pmgrpcd.OPTIONS))

    # Test-Statements Logging
    # -----------------------
    # PMGRPCDLOG.debug('debug message')
    # PMGRPCDLOG.info('info message')
    # PMGRPCDLOG.warning('warn message')
    # PMGRPCDLOG.error('error message')
    # PMGRPCDLOG.critical('critical message')

    # serializelog.debug('debug message')
    # serializelog.info('info message')
    # serializelog.warning('warn message')
    # serializelog.error('error message')
    # serializelog.critical('critical message')

    configure()

    PMGRPCDLOG.info("enable listening to SIGNAL USR1 with Sinalhandler")
    signal.signal(signal.SIGUSR1, signalhandler)
    PMGRPCDLOG.info("enable listening to SIGNAL USR2 with Sinalhandler")
    signal.signal(signal.SIGUSR2, signalhandler)

    # TODO: The manual export of data below should eventually be commented out; it could go into another script.
    if lib_pmgrpcd.OPTIONS.avscid and lib_pmgrpcd.OPTIONS.jsondatafile:
        manually_serialize()
    elif lib_pmgrpcd.OPTIONS.file_importer_file:
        file_importer = FileInput(lib_pmgrpcd.OPTIONS.file_importer_file)
        PMGRPCDLOG.info("Starting file import")
        file_importer.generate()
        PMGRPCDLOG.info("No more data, sleeping 3 secs")
        time.sleep(3)
        PMGRPCDLOG.info("Finalizing file import")
    elif lib_pmgrpcd.OPTIONS.avscid or lib_pmgrpcd.OPTIONS.jsondatafile:
        PMGRPCDLOG.info(
            "manually serialize needs both options: avscid and jsondatafile"
        )
        parser.print_help()
    #elif lib_pmgrpcd.OPTIONS.gnmi_enable:
    #    if lib_pmgrpcd.OPTIONS.gnmi_target is None:
    #        error = "gnmi target not configured, but gnmi enabled"
    #        PMGRPCDLOG.error(error)
    #        raise Exception(error)
    #
    #    PMGRPCDLOG.info("Starting contact with gnmi server %s. Other functions will be ignored", lib_pmgrpcd.OPTIONS.gnmi_target)
    #    channel = grpc.insecure_channel(lib_pmgrpcd.OPTIONS.gnmi_target)
    #    gnmi_client = GNMIClient(channel)
    #    breakpoint()

    else:
        # make sure some important files exist
        if not os.path.isfile(lib_pmgrpcd.OPTIONS.gpbmapfile):
            raise FileNotFound("No gpbmapfile file found in {}".format(
                lib_pmgrpcd.OPTIONS.gpbmapfile))

        # TODO: Do we really need this always?
        if not os.path.isfile(lib_pmgrpcd.OPTIONS.avscmapfile):
            raise FileNotFound("No avscmapfile file found in {}".format(
                lib_pmgrpcd.OPTIONS.avscmapfile))
        PMGRPCDLOG.info("pmgrpsd.py is started at %s", str(datetime.now()))
        serve()
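
Most of these options fall back to the [PMGRPCD] section of the configuration file. A minimal sketch of that section, generated here with configparser, restricted to the keys whose config.get()/getboolean() calls above have no fallback; every value is hypothetical.

# Sketch only: writes a minimal pmgrpcd.conf with hypothetical values.
import configparser

config = configparser.ConfigParser()
config["PMGRPCD"] = {
    "mitigation": "no",
    "debug": "no",
    "PMGRPCDLOGfile": "/var/log/pmgrpcd.log",
    "serializelogfile": "/var/log/pmgrpcd_avro.log",
    "ipport": "[::]:10000",          # address:port the gRPC server listens on
    "workers": "20",
    "cisco": "yes",
    "huawei": "yes",
    "cenctype": "gpbkv",             # json, gpbcomp or gpbkv
    "example": "no",
    "examplepath": "/tmp/pmgrpcd_examples",
    "zmq": "no",
    "zmqipport": "tcp://127.0.0.1:5678",
    "kafkaavro": "no",
    "onlyopenconfig": "no",
}
with open("pmgrpcd.conf", "w") as f:
    config.write(f)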
Example #4
def serve():

    gRPCserver = grpc.server(
        futures.ThreadPoolExecutor(max_workers=lib_pmgrpcd.OPTIONS.workers))

    if lib_pmgrpcd.OPTIONS.huawei:
        if lib_pmgrpcd.OPTIONS.cenctype == 'gpbkv':
            PMGRPCDLOG.info("Huawei is disabled because cenctype=gpbkv")
        else:
            PMGRPCDLOG.info("Huawei is enabled")
            # Ugly, but we have to import here; otherwise there is an exception due to a conflict between the Cisco and Huawei protos.
            from huawei_pmgrpcd import gRPCDataserviceServicer
            huawei_grpc_dialout_pb2_grpc.add_gRPCDataserviceServicer_to_server(
                gRPCDataserviceServicer(), gRPCserver)
    else:
        PMGRPCDLOG.info("Huawei is disabled")

    if lib_pmgrpcd.OPTIONS.cisco:
        PMGRPCDLOG.info("Cisco is enabled")
        # Ugly, but we have to import here; otherwise there is an exception due to a conflict between the Cisco and Huawei protos.
        from cisco_pmgrpcd import gRPCMdtDialoutServicer
        cisco_grpc_dialout_pb2_grpc.add_gRPCMdtDialoutServicer_to_server(
            gRPCMdtDialoutServicer(), gRPCserver)
    else:
        PMGRPCDLOG.info("Cisco is disabled")

    gRPCserver.add_insecure_port(lib_pmgrpcd.OPTIONS.ipport)
    gRPCserver.start()

    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        gRPCserver.stop(0)
        PMGRPCDLOG.info("Stopping server")
        time.sleep(1)
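
serve() references _ONE_DAY_IN_SECONDS, which is not part of this snippet; presumably it is just a module-level constant in the usual gRPC serving-loop style, e.g.:

_ONE_DAY_IN_SECONDS = 60 * 60 * 24  # assumed module-level constant, as in the canonical gRPC serving loop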
Example #5
def huawei_processing(grpcPeer, new_msg):
    PMGRPCDLOG.debug("Huawei: Received GRPC-Data")

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatafile:
        PMGRPCDLOG.debug("Write rawdatafile: %s" %
                         (lib_pmgrpcd.OPTIONS.rawdatafile))
        with open(lib_pmgrpcd.OPTIONS.rawdatafile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    try:
        telemetry_msg = huawei_telemetry_pb2.Telemetry()
        telemetry_msg.ParseFromString(new_msg.data)
    except Exception as e:
        PMGRPCDLOG.error(
            "instancing or parsing data failed with huawei_telemetry_pb2.Telemetry"
        )
        PMGRPCDLOG.error("ERROR: %s" % (e))
        raise

    try:
        telemetry_msg_dict = MessageToDict(
            telemetry_msg,
            including_default_value_fields=True,
            preserving_proto_field_name=True,
            use_integers_for_enums=True,
        )
    except Exception as e:
        PMGRPCDLOG.error(
            "instancing or parsing data failed with huawei_telemetry_pb2.Telemetry"
        )
        raise

    PMGRPCDLOG.debug("Huawei: Received GPB-Data as JSON")
    # TODO: Do we really need this? it can be expensive
    PMGRPCDLOG.debug(json.dumps(telemetry_msg_dict, indent=2, sort_keys=True))

    message_header_dict = telemetry_msg_dict.copy()

    if "data_gpb" in message_header_dict:
        del message_header_dict["data_gpb"]

    (proto, path) = message_header_dict["sensor_path"].split(":")
    node_id_str = message_header_dict["node_id_str"]
    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]

    # Get the matching L3 method
    msg = select_gbp_methode(proto)
    if msg:
        elem = len(telemetry_msg.data_gpb.row)
        epochmillis = int(round(time.time() * 1000))
        PMGRPCDLOG.info(
            "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM:%s"
            %
            (epochmillis, node_ip, node_id_str, ne_vendor, proto, "GPB", elem))

        # L2:
        for new_row in telemetry_msg.data_gpb.row:
            # PMGRPCDLOG.info("NEW_ROW: %s" % (new_row))
            new_row_header_dict = MessageToDict(
                new_row,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )

            if "content" in new_row_header_dict:
                del new_row_header_dict["content"]

            # L3:
            msg.ParseFromString(new_row.content)
            content = MessageToDict(
                msg,
                including_default_value_fields=True,
                preserving_proto_field_name=True,
                use_integers_for_enums=True,
            )

            message_dict = {}
            message_dict.update({
                "collector": {
                    "grpc": {
                        "grpcPeer": grpcPeer["telemetry_node"],
                        "ne_vendor": grpcPeer["ne_vendor"],
                    }
                }
            })
            message_dict["collector"].update(
                {"data": message_header_dict.copy()})
            message_dict["collector"]["data"].update(new_row_header_dict)
            message_dict.update(content)

            allkeys = parse_dict(content, ret="", level=0)
            PMGRPCDLOG.debug("Huawei: %s: %s" % (proto, allkeys))

            try:
                returned = FinalizeTelemetryData(message_dict)
            except Exception as e:
                PMGRPCDLOG.error("Error finalazing  message: %s", e)
Example #6
 def __init__(self):
     PMGRPCDLOG.info("Huawei: Initializing gRPCDataserviceServicer()")
Example #7
def cisco_processing(grpcPeer, new_msg):
    messages = {}
    grpc_message = {}
    encoding_type = None
    PMGRPCDLOG.debug("Cisco: Received GRPC-Data")
    PMGRPCDLOG.debug(new_msg.data)

    # dump the raw data
    if lib_pmgrpcd.OPTIONS.rawdatadumpfile:
        PMGRPCDLOG.debug("Write rawdatadumpfile: %s" % (lib_pmgrpcd.OPTIONS.rawdatafile))
        with open(lib_pmgrpcd.OPTIONS.rawdatadumpfile, "a") as rawdatafile:
            rawdatafile.write(base64.b64encode(new_msg.data).decode())
            rawdatafile.write("\n")

    # Find the encoding of the packet
    try:
        encoding_type, grpc_message = find_encoding_and_decode(new_msg)
    except Exception as e:
        PMGRPCDLOG.error("Error decoding packet. Error is {}".format(e))


    PMGRPCDLOG.debug("encoding_type is: %s\n" % (encoding_type))

    if (encoding_type == "unknown") or encoding_type is None:
        print("encoding_type is unknown.")


    if (encoding_type == "unknown") or encoding_type is None:
        raise Exception("Encoding type unknown")

    message_header_dict = grpc_message.copy()

    if "data_json" in message_header_dict:
        del message_header_dict["data_json"]

    PMGRPCDLOG.debug("Header:%s", message_header_dict)

    node_ip = grpcPeer["telemetry_node"]
    ne_vendor = grpcPeer["ne_vendor"]
    epochmillis = int(round(time.time() * 1000))

    if encoding_type == "ciscojson":
        message_header_dict.update({"encoding_type": encoding_type})
        (proto, path) = message_header_dict["encoding_path"].split(":")
        node_id_str = message_header_dict["node_id_str"]
        elem = len(grpc_message["data_json"])
        messages = grpc_message["data_json"]
    elif encoding_type == "ciscogrpckv":
        message_header_dict.update({"encoding_type": encoding_type})
        message_header_dict["encoding_path"] = message_header_dict.pop("encodingPath")
        message_header_dict["node_id_str"] = message_header_dict.pop("nodeIdStr")
        message_header_dict["msg_timestamp"] = message_header_dict.pop("msgTimestamp")
        message_header_dict["subscription_id_str"] = message_header_dict.pop(
            "subscriptionIdStr"
        )

        full_encoding_path = message_header_dict["encoding_path"]
        if ":" in full_encoding_path:
            (proto, path) = message_header_dict["encoding_path"].split(":")
        else:
            proto = None
            path = full_encoding_path
        node_id_str = message_header_dict["node_id_str"]
        if "dataGpbkv" in grpc_message:
            elem = len(grpc_message["dataGpbkv"])
            messages = grpc_message["dataGpbkv"]
        else:
            elem = 0
            messages = {}
    message_header_dict["path"] = path

    PMGRPCDLOG.info(
        "EPOCH=%-10s NIP=%-15s NID=%-20s VEN=%-7s PT=%-22s ET=%-12s ELEM=%s",
        epochmillis,
        node_ip,
        node_id_str,
        ne_vendor,
        proto,
        encoding_type,
        elem,
    )

    # A single telemetry packet can contain multiple messages (each having its own keys/values).
    # Here we process them one by one.

    for listelem in messages:
        # Copy the necessary metadata to the packet.
        PMGRPCDLOG.debug("LISTELEM: %s", listelem)

        message_dict = {}
        message_dict.update({"collector": {"grpc": {}}})
        message_dict["collector"]["grpc"].update(
            {"grpcPeer": grpcPeer["telemetry_node"]}
        )
        message_dict["collector"]["grpc"].update({"ne_vendor": grpcPeer["ne_vendor"]})
        message_dict["collector"].update({"data": message_header_dict})

        if encoding_type == "ciscojson":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["content"])
            message_dict.update({path: listelem["content"]})
        elif encoding_type == "ciscogrpckv":
            PMGRPCDLOG.debug("TEST: %s | %s", path, listelem["fields"])
            message_dict.update({path: listelem["fields"]})

        # allkeys = parse_dict(listelem, ret='', level=0)
        # PMGRPCDLOG.info("Cisco: %s: %s" % (proto, allkeys))
        try:
            returned = FinalizeTelemetryData(message_dict)
        except Exception as e:
            PMGRPCDLOG.error("Error finalazing  message: %s", e)
Example #8
 def __init__(self):
     PMGRPCDLOG.info("Cisco: Initializing gRPCMdtDialoutServicer()")
Example #9
def FinalizeTelemetryData(dictTelemetryData):

    # Add an epoch timestamp in milliseconds to identify this single metric on its way to storage
    epochmillis = int(round(time.time() * 1000))
    dictTelemetryData["collector"]["data"].update(
        {"collection_timestamp": epochmillis})

    dictTelemetryData_mod = dictTelemetryData.copy()

    # Run the data through the mitigation library, if enabled.
    # TODO: Simplify the next part
    dictTelemetryData_beforeencoding = None
    if lib_pmgrpcd.OPTIONS.mitigation:
        from mitigation import mod_all_json_data
        try:
            dictTelemetryData_mod = mod_all_json_data(dictTelemetryData_mod)
            dictTelemetryData_beforeencoding = dictTelemetryData_mod
            jsonTelemetryData = json.dumps(dictTelemetryData_mod,
                                           indent=2,
                                           sort_keys=True)
        except Exception as e:
            PMGRPCDLOG.info("ERROR: mod_all_json_data raised a error:\n%s")
            PMGRPCDLOG.info("ERROR: %s" % (e))
            dictTelemetryData_mod = dictTelemetryData
            dictTelemetryData_beforeencoding = dictTelemetryData
            jsonTelemetryData = json.dumps(dictTelemetryData,
                                           indent=2,
                                           sort_keys=True)
    else:
        dictTelemetryData_mod = dictTelemetryData
        dictTelemetryData_beforeencoding = dictTelemetryData
        jsonTelemetryData = json.dumps(dictTelemetryData,
                                       indent=2,
                                       sort_keys=True)

    PMGRPCDLOG.debug("After mitigation: %s" % (jsonTelemetryData))

    # Check if we need to transform. This will change later
    #breakpoint() if get_lock() else None
    path = dictTelemetryData_beforeencoding["collector"]["data"]["path"]
    actual_data = dictTelemetryData_beforeencoding.get(path, {})
    #if path == "sys/intf":
    #    return
    print(path)
    #breakpoint() if get_lock() else None

    if TRANSFORMATION and dictTelemetryData_beforeencoding and "dataGpbkv" in dictTelemetryData_beforeencoding.get(
            "collector", {}).get("data", {}):
        data = dictTelemetryData_beforeencoding["collector"]["data"].copy()
        data["dataGpbkv"] = [{"fields": actual_data}]
        # we just transform for kv
        metric = CiscoKVFlatten.build_from_dcit(data)
        internals = list(metric.get_internal())

        #breakpoint() if get_lock() else None
        for internal in internals:
            for new_metric in TRANSFORMATION.transform(internal):
                print(new_metric.keys)
                data = new_metric.data
                data["dataGpbkv"] = new_metric.content
                export_metrics(json.dumps({"collector": {"data": data}}))
        #breakpoint() if get_lock() else None
        return jsonTelemetryData
    #breakpoint() if get_lock() else None

    if lib_pmgrpcd.OPTIONS.examplepath and lib_pmgrpcd.OPTIONS.example:
        examples(dictTelemetryData_mod, jsonTelemetryData)

    if lib_pmgrpcd.OPTIONS.jsondatadumpfile:
        PMGRPCDLOG.debug("Write jsondatadumpfile: %s" %
                         (lib_pmgrpcd.OPTIONS.jsondatadumpfile))
        with open(lib_pmgrpcd.OPTIONS.jsondatadumpfile,
                  "a") as jsondatadumpfile:
            jsondatadumpfile.write(jsonTelemetryData)
            jsondatadumpfile.write("\n")

    # Optionally filter so that only OpenConfig paths are exported.
    export = True
    if lib_pmgrpcd.OPTIONS.onlyopenconfig:
        PMGRPCDLOG.debug(
            "only openconfig filter matched because of options.onlyopenconfig: %s"
            % lib_pmgrpcd.OPTIONS.onlyopenconfig)
        export = False
        if "encoding_path" in dictTelemetryData_mod["collector"]["data"]:
            if ("openconfig" in dictTelemetryData_mod["collector"]["data"]
                ["encoding_path"]):
                export = True

    if export:
        export_metrics(jsonTelemetryData)

    return jsonTelemetryData