Example 1
        def _collect(device_id, prefix):

            try:
                # Step 1: gather metrics from the device (simulated here);
                # the example metrics were extended to include packet
                # statistics for testing.
                nni_port_metrics = self.pm_metrics.collect_nni_metrics()
                pon_port_metrics = self.pm_metrics.collect_pon_metrics()

                olt_metrics = yield dict(cpu_util=20 + 5 * random.random(),
                                         buffer_util=10 + 10 * random.random())

                # Step 2: prepare the KpiEvent for submission
                # we can time-stamp them here (or could use time derived from the OLT)
                ts = arrow.utcnow().timestamp
                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=ts,
                    prefixes={
                        # OLT-level
                        prefix:
                        MetricValuePairs(metrics=olt_metrics),
                        # OLT NNI port
                        prefix + '.nni':
                        MetricValuePairs(metrics=nni_port_metrics),
                        # OLT PON port
                        prefix + '.pon':
                        MetricValuePairs(metrics=pon_port_metrics)
                    })

                # Step 3: submit
                self.adapter_agent.submit_kpis(kpi_event)

            except Exception as e:
                log.exception('failed-to-submit-kpis', e=e)
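
A note on how these `_collect` callbacks are driven: the `yield` statements imply the function is decorated with Twisted's `@inlineCallbacks` and invoked periodically by the enclosing adapter. The snippet below is only a minimal sketch of that wiring; the function name `start_kpi_collection`, the 15-second default interval, and the prefix layout are assumptions for illustration, not taken from the examples.

from twisted.internet.task import LoopingCall

def start_kpi_collection(collect_fn, adapter_name, device_id, interval=15):
    """Drive an @inlineCallbacks-decorated _collect on a fixed interval."""
    # assumed prefix layout, mirroring the other examples in this section
    prefix = 'voltha.{}.{}'.format(adapter_name, device_id)
    lc = LoopingCall(collect_fn, device_id, prefix)
    lc.start(interval)  # fires immediately, then every `interval` seconds
    return lc
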
Example 2
        def _collect(device_id, prefix):

            try:
                # Step 1: gather metrics from device
                port_metrics = \
                    self.pm_metrics.collect_port_metrics(self.get_channel())

                # Step 2: prepare the KpiEvent for submission
                # we can time-stamp them here (or could use time derived from the OLT)
                ts = arrow.utcnow().timestamp
                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=ts,
                    prefixes={
                        # OLT NNI port
                        prefix + '.nni': MetricValuePairs(
                            metrics=port_metrics['nni']),
                        # OLT PON port
                        prefix + '.pon': MetricValuePairs(
                            metrics=port_metrics['pon'])
                    }
                )

                # Step 3: submit
                self.adapter_agent.submit_kpis(kpi_event)

            except Exception as e:
                log.exception('failed-to-submit-kpis', e=e)
Example 3
    def receive_message(self, msg):
        if isinstance(msg, PonSimMetrics):
            # Message is a reply to an ONU statistics request. Push it out to Kafka via adapter.submit_kpis().
            if self.pm_metrics:
                self.log.debug('Handling incoming ONU metrics')
                prefix = 'voltha.{}.{}'.format("ponsim_onu", self.device_id)
                port_metrics = self.pm_metrics.extract_metrics(msg)
                try:
                    ts = arrow.utcnow().timestamp
                    kpi_event = KpiEvent(
                        type=KpiEventType.slice,
                        ts=ts,
                        prefixes={
                            # ONU UNI port
                            prefix + '.uni':
                            MetricValuePairs(metrics=port_metrics['uni']),
                            # ONU PON port
                            prefix + '.pon':
                            MetricValuePairs(metrics=port_metrics['pon'])
                        })

                    self.log.debug('Submitting KPI for incoming ONU metrics')

                    # Step 3: submit
                    self.adapter_agent.submit_kpis(kpi_event)
                except Exception as e:
                    self.log.exception('failed-to-submit-kpis', e=e)
            else:
                # We received a statistics message, but we don't have pm_metrics set up. This shouldn't happen.
                self.log.warning('received unexpected PonSimMetrics')
        else:
            # The message is probably a reply to a FlowTable update. self.update_flow_table() will pop it off this
            # queue and return it to its caller.
            self.incoming_messages.put(msg)
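
The final `else` branch relies on a simple request/response convention: a caller such as `update_flow_table()` sends a request and then waits on `self.incoming_messages` (a Twisted `DeferredQueue`), and `receive_message()` feeds the reply back into that queue. Below is a minimal sketch of the caller side under that assumption; `transport.send` and the function name are hypothetical, not part of the adapter above.

from twisted.internet.defer import inlineCallbacks, returnValue

@inlineCallbacks
def send_request_and_wait(transport, request, incoming_messages):
    """Send a request, then wait for receive_message() to enqueue the reply."""
    transport.send(request)                  # hypothetical transport call
    reply = yield incoming_messages.get()    # incoming_messages: DeferredQueue
    returnValue(reply)
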
Example 4
    def ports_statistics_kpis(self, port_stats):
        pm_data = {}
        pm_data["rx_bytes"] = port_stats.rx_bytes
        pm_data["rx_packets"] = port_stats.rx_packets
        pm_data["rx_ucast_packets"] = port_stats.rx_ucast_packets
        pm_data["rx_mcast_packets"] = port_stats.rx_mcast_packets
        pm_data["rx_bcast_packets"] = port_stats.rx_bcast_packets
        pm_data["rx_error_packets"] = port_stats.rx_error_packets
        pm_data["tx_bytes"] = port_stats.tx_bytes
        pm_data["tx_packets"] = port_stats.tx_packets
        pm_data["tx_ucast_packets"] = port_stats.tx_ucast_packets
        pm_data["tx_mcast_packets"] = port_stats.tx_mcast_packets
        pm_data["tx_bcast_packets"] = port_stats.tx_bcast_packets
        pm_data["tx_error_packets"] = port_stats.tx_error_packets
        pm_data["rx_crc_errors"] = port_stats.rx_crc_errors
        pm_data["bip_errors"] = port_stats.bip_errors

        prefix = 'voltha.openolt.{}'.format(self.device_id)
        # FIXME
        if port_stats.intf_id < 132:
            prefixes = {
                prefix + '.nni.{}'.format(port_stats.intf_id):
                MetricValuePairs(metrics=pm_data)
            }
        else:
            prefixes = {
                prefix + '.pon.{}'.format(
                    platform.intf_id_from_pon_port_no(port_stats.intf_id)):
                MetricValuePairs(metrics=pm_data)
            }

        kpi_event = KpiEvent(type=KpiEventType.slice,
                             ts=port_stats.timestamp,
                             prefixes=prefixes)
        self.adapter_agent.submit_kpis(kpi_event)
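
Since every counter in `pm_data` is copied verbatim from an attribute of the same name on `port_stats`, the block above could also be written as a loop over the counter names. A sketch of that alternative, assuming `port_stats` exposes each counter as an attribute:

# Counter names taken from the assignments above.
PORT_COUNTERS = (
    'rx_bytes', 'rx_packets', 'rx_ucast_packets', 'rx_mcast_packets',
    'rx_bcast_packets', 'rx_error_packets',
    'tx_bytes', 'tx_packets', 'tx_ucast_packets', 'tx_mcast_packets',
    'tx_bcast_packets', 'tx_error_packets',
    'rx_crc_errors', 'bip_errors',
)

def port_stats_to_pm_data(port_stats):
    """Collect the per-port counters into a metrics dict."""
    return {name: getattr(port_stats, name) for name in PORT_COUNTERS}
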
Example 5
    def publish_metrics(self, metrics):
        """
        Publish the metrics during a collection

        :param metrics: (dict) Metrics to publish. If empty, no metrics will be published
        """
        self.log.debug('publish-metrics', metrics=metrics)

        if len(metrics):
            import arrow
            from voltha.protos.events_pb2 import KpiEvent, KpiEventType, MetricValuePairs

            try:
                ts = arrow.utcnow().timestamp
                kpi_event = KpiEvent(type=KpiEventType.slice,
                                     ts=ts,
                                     prefixes={
                                         self.prefix + '.{}'.format(k):
                                         MetricValuePairs(metrics=metrics[k])
                                         for k in metrics.keys()
                                     })
                self.adapter_agent.submit_kpis(kpi_event)

            except Exception as e:
                self.log.exception('failed-to-submit-kpis', e=e)
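
For clarity, `metrics` here is a two-level dictionary: the outer keys become suffixes appended to `self.prefix`, and each inner dictionary holds the name/value pairs published under that sub-prefix. A purely illustrative example of the expected shape (the port names, values, and prefix below are made up):

sample_metrics = {
    'nni': {'tx_bytes': 1234567, 'rx_bytes': 7654321},
    'pon': {'tx_pkts': 4200, 'rx_pkts': 4100},
}

# With a hypothetical self.prefix of 'voltha.some_olt.device-1', the KpiEvent
# would carry one MetricValuePairs entry per sub-prefix:
prefix = 'voltha.some_olt.device-1'
print({prefix + '.{}'.format(k): v for k, v in sample_metrics.items()})
# -> {'voltha.some_olt.device-1.nni': {...}, 'voltha.some_olt.device-1.pon': {...}}
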
Example 6
    def publish_metrics(self, interval_data):
        """
        Collect the metrics for this ONU PM Interval

        :param interval_data: (dict) PM interval dictionary with structure of
                    {
                        'class_id': self._class_id,
                        'entity_id': self._entity_id,
                        'me_name': self._entity.__name__,   # Mostly for debugging...
                        'interval_utc_time': None,
                        # Counters added here as they are retrieved
                    }

        :return: (dict) Key/Value of metric data
        """
        self.log.debug('publish-metrics', metrics=interval_data)

        try:
            import arrow
            from voltha.protos.events_pb2 import KpiEvent, KpiEventType, MetricValuePairs
            # Locate config

            class_id = interval_data['class_id']
            config = self._configs.get(class_id)
            group = self.pm_group_metrics.get(OnuPmIntervalMetrics.ME_ID_INFO.get(class_id, ''))

            if config is not None and group is not None and group.enabled:
                # Extract only the metrics we need to publish
                config_keys = config.keys()
                metrics = {
                    interval_data['me_name']: {k: v
                                               for k, v in interval_data.items()
                                               if k in config_keys and v is not None}
                }
                # Prepare the KpiEvent for submission
                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=arrow.get(interval_data['interval_utc_time']).timestamp,
                    prefixes={
                        self.prefix + '.{}'.format(k): MetricValuePairs(metrics=metrics[k])
                        for k in metrics.keys()}
                )
                self.adapter_agent.submit_kpis(kpi_event)

        except Exception as e:
            self.log.exception('failed-to-submit-kpis', e=e)
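
To make the filtering step above concrete, here is a purely illustrative `interval_data` dictionary and the corresponding `metrics` extraction; the ME name, counter names, and enabled-key set are invented for the sketch and are not real OMCI counters or configuration:

interval_data = {
    'class_id': 999,
    'entity_id': 1,
    'me_name': 'ExampleHistoryData',            # mostly for debugging
    'interval_utc_time': '2018-01-01T00:15:00+00:00',
    'crc_errors': 0,
    'discarded_frames': 3,
    'unsupported_counter': None,                # dropped: value is None
}
config_keys = {'crc_errors', 'discarded_frames', 'unsupported_counter'}

metrics = {
    interval_data['me_name']: {k: v for k, v in interval_data.items()
                               if k in config_keys and v is not None}
}
print(metrics)   # {'ExampleHistoryData': {'crc_errors': 0, 'discarded_frames': 3}}
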
Example 7
        def _collect(device_id, prefix):

            try:
                # Step 1: gather metrics from the device (simulated here with example values)
                uni_port_metrics = yield dict(
                    tx_pkts=random.randint(0, 100),
                    rx_pkts=random.randint(0, 100),
                    tx_bytes=random.randint(0, 100000),
                    rx_bytes=random.randint(0, 100000),
                )
                pon_port_metrics = yield dict(
                    tx_pkts=uni_port_metrics['rx_pkts'],
                    rx_pkts=uni_port_metrics['tx_pkts'],
                    tx_bytes=uni_port_metrics['rx_bytes'],
                    rx_bytes=uni_port_metrics['tx_bytes'],
                )
                onu_metrics = yield dict(cpu_util=20 + 5 * random.random(),
                                         buffer_util=10 + 10 * random.random())

                # Step 2: prepare the KpiEvent for submission
                # we can time-stamp them here (or could use time derived from the OLT)
                ts = arrow.utcnow().timestamp
                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=ts,
                    prefixes={
                        # ONU-level
                        prefix:
                        MetricValuePairs(metrics=onu_metrics),
                        # ONU UNI port (published under the '.nni' prefix in this simulated adapter)
                        prefix + '.nni':
                        MetricValuePairs(metrics=uni_port_metrics),
                        # ONU PON port
                        prefix + '.pon':
                        MetricValuePairs(metrics=pon_port_metrics)
                    })

                # Step 3: submit
                self.adapter_agent.submit_kpis(kpi_event)

            except Exception as e:
                log.exception('failed-to-submit-kpis', e=e)
Example 8
    def remote_report_stats(self, object, key, stats_data):
        log.info('received-stats-msg',
                 object=object,
                 key=key,
                 stats=stats_data)

        prefix = 'voltha.{}.{}'.format(self.adapter_name, self.device_id)

        try:
            ts = arrow.utcnow().timestamp

            prefixes = {prefix + '.nni': MetricValuePairs(metrics=stats_data)}

            kpi_event = KpiEvent(type=KpiEventType.slice,
                                 ts=ts,
                                 prefixes=prefixes)

            self.adapter_agent.submit_kpis(kpi_event)

        except Exception as e:
            log.exception('failed-to-submit-kpis', e=e)
Example 9
        def _collect(device_id, prefix):
            from voltha.protos.events_pb2 import KpiEvent, KpiEventType, MetricValuePairs

            if self.enabled:
                try:
                    # Step 1: gather metrics from device
                    port_metrics = self.pm_metrics.collect_port_metrics()

                    # Step 2: prepare the KpiEvent for submission
                    # we can time-stamp them here (or could use time derived from the OLT)
                    ts = arrow.utcnow().timestamp
                    kpi_event = KpiEvent(
                        type=KpiEventType.slice,
                        ts=ts,
                        prefixes={
                            prefix + '.{}'.format(k): MetricValuePairs(metrics=port_metrics[k])
                            for k in port_metrics.keys()}
                    )
                    # Step 3: submit
                    self.adapter_agent.submit_kpis(kpi_event)

                except Exception as e:
                    self.log.exception('failed-to-submit-kpis', e=e)
Example 10
        def _collect(device_id, prefix):

            pon_port_metrics = {}
            olt_mac = next((mac for mac, device in self.device_ids.iteritems() if device == device_id), None)
            links = [v[TIBIT_ONU_LINK_INDEX] for _, v in self.vlan_to_device_ids.iteritems()]

            try:
                # Step 1: gather metrics from device
                log.info('link stats frame', links=links)
                for link in links:
                    stats_frame = self._make_stats_frame(mac_address=olt_mac, itype='olt', link=link)
                    self.io_port.send(stats_frame)

                    ## Add timeout mechanism so we can signal if we cannot reach
                    ## device
                    while True:
                        response = yield self.incoming_queues[olt_mac].get()
                        jdict = json.loads(response.payload.payload.body.load)
                        pon_port_metrics[link] = {k: int(v,16) for k,v in jdict['results'].iteritems()}
                        # verify the response; if it is not the expected one, keep waiting
                        if 1: # TODO check if it is really what we expect, and wait if not
                            break

                log.info('nni stats frame')
                olt_nni_link = ''.join(l for l in olt_mac.split(':'))
                stats_frame = self._make_stats_frame(mac_address=olt_mac, itype='eth', link=olt_nni_link)
                self.io_port.send(stats_frame)

                ## Add timeout mechanism so we can signal if we cannot reach
                ## device
                while True:
                    response = yield self.incoming_queues[olt_mac].get()
                    jdict = json.loads(response.payload.payload.body.load)
                    nni_port_metrics = {k: int(v,16) for k,v in jdict['results'].iteritems()}
                    # verify the response; if it is not the expected one, keep waiting
                    if 1: # TODO check if it is really what we expect, and wait if not
                        break

                olt_metrics = dict(
                    cpu_util=20 + 5 * random.random(),
                    buffer_util=10 + 10 * random.random()
                )

                # Step 2: prepare the KpiEvent for submission
                # we can time-stamp them here (or could use time derived from the OLT)
                ts = arrow.utcnow().timestamp
                prefixes = {
                    # CPU Metrics (example)
                    prefix: MetricValuePairs(metrics=olt_metrics),
                    # OLT NNI port
                    prefix + '.nni': MetricValuePairs(metrics=nni_port_metrics)
                    }

                for link in links:
                    # PON link ports
                    prefixes[prefix + '.pon.{}'.format(link)] = MetricValuePairs(metrics=pon_port_metrics[link])

                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=ts,
                    prefixes=prefixes
                )

                # Step 3: submit
                self.adapter_agent.submit_kpis(kpi_event)

            except Exception as e:
                log.exception('failed-to-submit-kpis', e=e)
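
The two `## Add timeout mechanism` placeholders above could be filled in with Twisted's `Deferred.addTimeout`, which cancels the wait and errbacks with a `TimeoutError` if the device never answers. The helper below is only a sketch of that idea; its name, the 5-second timeout, and the `incoming_queue` parameter (standing in for `self.incoming_queues[olt_mac]`) are assumptions:

from twisted.internet import reactor
from twisted.internet.defer import TimeoutError, inlineCallbacks, returnValue

@inlineCallbacks
def get_response_or_timeout(incoming_queue, timeout=5):
    """Wait for the next frame from the device, giving up after `timeout` seconds."""
    d = incoming_queue.get()           # DeferredQueue.get() returns a Deferred
    d.addTimeout(timeout, reactor)     # errback with TimeoutError on expiry
    try:
        response = yield d
    except TimeoutError:
        # signal the caller that the device did not respond in time
        returnValue(None)
    returnValue(response)
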
Example 11
        def _collect(device_id, prefix):

            try:
                # Step 1: gather metrics from the device (simulated here);
                # the example metrics were extended to include packet
                # statistics for testing.
                nni_port_metrics = yield dict(
                    tx_pkts=self.nni_tx_pkts + random.randint(90, 100),
                    rx_pkts=self.nni_rx_pkts + random.randint(90, 100),
                    tx_bytes=self.nni_tx_bytes + random.randint(90000, 100000),
                    rx_bytes=self.nni_rx_bytes + random.randint(90000, 100000),
                    tx_64=self.nni_tx_64 + random.randint(50, 55),
                    tx_65_127=self.nni_tx_65_127 + random.randint(55, 60),
                    tx_128_255=self.nni_tx_128_255 + random.randint(60, 65),
                    tx_256_511=self.nni_tx_256_511 + random.randint(85, 90),
                    tx_512_1023=self.nni_tx_512_1023 + random.randint(90, 95),
                    tx_1024_1518=self.nni_tx_1024_1518 + random.randint(60,65),
                    tx_1519_9k=self.nni_tx_1519_9k + random.randint(50, 55),

                    rx_64=self.nni_rx_64 + random.randint(50, 55),
                    rx_65_127=self.nni_rx_65_127 + random.randint(55, 60),
                    rx_128_255=self.nni_rx_128_255 + random.randint(60, 65),
                    rx_256_511=self.nni_rx_256_511 + random.randint(85, 90),
                    rx_512_1023=self.nni_rx_512_1023 + random.randint(90, 95),
                    rx_1024_1518=self.nni_rx_1024_1518 + random.randint(60, 65),
                    rx_1519_9k=self.nni_rx_1519_9k + random.randint(50, 55)
                )
                pon_port_metrics = yield dict(
                    tx_pkts=self.pon_tx_pkts + random.randint(90, 100),
                    rx_pkts=self.pon_rx_pkts + random.randint(90, 100),
                    tx_bytes=self.pon_tx_bytes + random.randint(90000, 100000),
                    rx_bytes=self.pon_rx_bytes + random.randint(90000, 100000),
                    tx_64=self.pon_tx_64 + random.randint(50, 55),
                    tx_65_127=self.pon_tx_65_127 + random.randint(55, 60),
                    tx_128_255=self.pon_tx_128_255 + random.randint(60, 65),
                    tx_256_511=self.pon_tx_256_511 + random.randint(85, 90),
                    tx_512_1023=self.pon_tx_512_1023 + random.randint(90, 95),
                    tx_1024_1518=self.pon_tx_1024_1518 + random.randint(60,65),
                    tx_1519_9k=self.pon_tx_1519_9k + random.randint(50, 55),

                    rx_64=self.pon_rx_64 + random.randint(50, 55),
                    rx_65_127=self.pon_rx_65_127 + random.randint(55, 60),
                    rx_128_255=self.pon_rx_128_255 + random.randint(60, 65),
                    rx_256_511=self.pon_rx_256_511 + random.randint(85, 90),
                    rx_512_1023=self.pon_rx_512_1023 + random.randint(90, 95),
                    rx_1024_1518=self.pon_rx_1024_1518 + random.randint(60, 65),
                    rx_1519_9k=self.pon_rx_1519_9k + random.randint(50, 55)
                )
                self.pon_tx_pkts = pon_port_metrics['tx_pkts']
                self.pon_rx_pkts = pon_port_metrics['rx_pkts']
                self.pon_tx_bytes = pon_port_metrics['tx_bytes']
                self.pon_rx_bytes = pon_port_metrics['rx_bytes']

                self.pon_tx_64 = pon_port_metrics['tx_64']
                self.pon_tx_65_127 = pon_port_metrics['tx_65_127']
                self.pon_tx_128_255 = pon_port_metrics['tx_128_255']
                self.pon_tx_256_511 = pon_port_metrics['tx_256_511']
                self.pon_tx_512_1023 = pon_port_metrics['tx_512_1023']
                self.pon_tx_1024_1518 = pon_port_metrics['tx_1024_1518']
                self.pon_tx_1519_9k = pon_port_metrics['tx_1519_9k']

                self.pon_rx_64 = pon_port_metrics['rx_64']
                self.pon_rx_65_127 = pon_port_metrics['rx_65_127']
                self.pon_rx_128_255 = pon_port_metrics['rx_128_255']
                self.pon_rx_256_511 = pon_port_metrics['rx_256_511']
                self.pon_rx_512_1023 = pon_port_metrics['rx_512_1023']
                self.pon_rx_1024_1518 = pon_port_metrics['rx_1024_1518']
                self.pon_rx_1519_9k = pon_port_metrics['rx_1519_9k']

                self.nni_tx_pkts = nni_port_metrics['tx_pkts']
                self.nni_rx_pkts = nni_port_metrics['rx_pkts']
                self.nni_tx_bytes = nni_port_metrics['tx_bytes']
                self.nni_rx_bytes = nni_port_metrics['rx_bytes']

                self.nni_tx_64 = nni_port_metrics['tx_64']
                self.nni_tx_65_127 = nni_port_metrics['tx_65_127']
                self.nni_tx_128_255 = nni_port_metrics['tx_128_255']
                self.nni_tx_256_511 = nni_port_metrics['tx_256_511']
                self.nni_tx_512_1023 = nni_port_metrics['tx_512_1023']
                self.nni_tx_1024_1518 = nni_port_metrics['tx_1024_1518']
                self.nni_tx_1519_9k = nni_port_metrics['tx_1519_9k']

                self.nni_rx_64 = nni_port_metrics['rx_64']
                self.nni_rx_65_127 = nni_port_metrics['rx_65_127']
                self.nni_rx_128_255 = nni_port_metrics['rx_128_255']
                self.nni_rx_256_511 = nni_port_metrics['rx_256_511']
                self.nni_rx_512_1023 = nni_port_metrics['rx_512_1023']
                self.nni_rx_1024_1518 = nni_port_metrics['rx_1024_1518']
                self.nni_rx_1519_9k = nni_port_metrics['rx_1519_9k']

                olt_metrics = yield dict(
                    cpu_util=20 + 5 * random.random(),
                    buffer_util=10 + 10 * random.random()
                )

                # Step 2: prepare the KpiEvent for submission
                # we can time-stamp them here (or could use time derived from the OLT)
                ts = arrow.utcnow().timestamp
                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=ts,
                    prefixes={
                        # OLT-level
                        prefix: MetricValuePairs(metrics=olt_metrics),
                        # OLT NNI port
                        prefix + '.nni': MetricValuePairs(
                            metrics=nni_port_metrics),
                        # OLT PON port
                        prefix + '.pon': MetricValuePairs(
                            metrics=pon_port_metrics)
                    }
                )

                # Step 3: submit
                self.adapter_agent.submit_kpis(kpi_event)

            except Exception as e:
                log.exception('failed-to-submit-kpis', e=e)
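
The long run of `self.nni_*` / `self.pon_*` attribute updates in this last example could be reduced by keeping the previous counter values in one dictionary per port. The class below is a minimal sketch of that alternative bookkeeping; its name, the field list, and the random increments are illustrative only:

import random

class SimulatedPortCounters(object):
    """Track the last simulated counter snapshot for each port in a dict."""

    FIELDS = ('tx_pkts', 'rx_pkts', 'tx_bytes', 'rx_bytes')

    def __init__(self, ports=('nni', 'pon')):
        self.previous = {port: dict.fromkeys(self.FIELDS, 0) for port in ports}

    def next_sample(self, port):
        """Advance the counters for `port` and return the new snapshot."""
        sample = {name: value + random.randint(90, 100)
                  for name, value in self.previous[port].items()}
        self.previous[port] = sample
        return sample
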