Example 1
    def check_heartbeat_delivery():
        # Note: 'self' comes from the enclosing scope (the helper is nested
        # inside a method in the original source).
        try:
            kafka_cluster_proxy = get_kafka_proxy()
            if kafka_cluster_proxy:
                kafka_cluster_proxy.check_heartbeat_delivery()
        except Exception as e:
            self.log.exception('failed-checking-heartbeat-delivery', e=e)
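A check like this is typically driven by a periodic task on the Twisted reactor. Below is a minimal sketch using LoopingCall; the 30-second interval and the wiring (invoking the nested helper from its enclosing method) are assumptions, not taken from the example above.

    from twisted.internet.task import LoopingCall

    def schedule_heartbeat_check(interval=30):
        # Hypothetical wiring: run check_heartbeat_delivery() every
        # `interval` seconds. Exceptions are already caught inside the
        # helper, so the loop keeps running if the Kafka proxy misbehaves.
        lc = LoopingCall(check_heartbeat_delivery)
        lc.start(interval, now=False)
        return lc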
Example 2
    def process_kafka_alive_state_update(self, alive_state):
        self.log.debug('process-kafka-alive-state-update', alive_state=alive_state)
        Probe.kafka_cluster_proxy_running = alive_state

        kafka_cluster_proxy = get_kafka_proxy()
        if kafka_cluster_proxy:
            Probe.kafka_proxy_faulty = kafka_cluster_proxy.is_faulty()
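The two class-level flags updated here are presumably consumed elsewhere by a health or readiness probe. A hypothetical consumer is sketched below; the function name and semantics are assumptions, only the two Probe attributes come from the example.

    def kafka_is_healthy():
        # Hypothetical readiness check combining the flags maintained above.
        return Probe.kafka_cluster_proxy_running and not Probe.kafka_proxy_faulty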
Example 3
    def send_msg(start_time):
        # Note: 'self', 'topic' and 'message' come from the enclosing scope
        # (the helper is nested inside a method in the original source).
        try:
            kafka_cluster_proxy = get_kafka_proxy()
            if kafka_cluster_proxy and not kafka_cluster_proxy.is_faulty():
                # self.log.debug('kafka-proxy-available')
                message['ts'] = arrow.utcnow().timestamp
                message['uptime'] = time.time() - start_time
                # self.log.debug('start-kafka-heartbeat')
                kafka_cluster_proxy.send_message(topic, dumps(message))
            else:
                self.log.error('kafka-proxy-unavailable')
        except Exception as e:
            self.log.exception('failed-sending-message-heartbeat', e=e)
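send_msg() depends on a topic name and a message dict defined in the enclosing scope. A minimal, assumed setup of those names (values purely illustrative) might look like this:

    import time
    import arrow
    from json import dumps   # assumed source of the dumps() used above

    topic = 'heartbeat'              # illustrative topic name
    message = {'type': 'heartbeat'}  # 'ts' and 'uptime' are filled in by send_msg()
    start_time = time.time()         # used by send_msg() to compute uptime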
Example 4
        def send_heartbeat_msg():
            try:
                kafka_cluster_proxy = get_kafka_proxy()
                if kafka_cluster_proxy:
                    message['ts'] = arrow.utcnow().timestamp
                    self.log.debug('sending-kafka-heartbeat-message')

                    # Creating a handler to receive the message callbacks
                    df = Deferred()
                    df.addCallback(self.process_kafka_alive_state_update)
                    kafka_cluster_proxy.register_alive_state_update(df)
                    kafka_cluster_proxy.send_heartbeat_message(topic, dumps(message))
                else:
                    Probe.kafka_cluster_proxy_running = False
                    self.log.error('kafka-proxy-unavailable')
            except Exception as e:
                self.log.exception('failed-sending-message-heartbeat', e=e)
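Worth noting: a Twisted Deferred fires at most once, which is presumably why a fresh Deferred is registered on every heartbeat rather than reused. A small standalone illustration (not project code):

    from twisted.internet.defer import Deferred

    def on_alive_state(alive):
        # Stand-in for process_kafka_alive_state_update()
        return alive

    d = Deferred()
    d.addCallback(on_alive_state)
    d.callback(True)      # fires the callback chain exactly once
    # Calling d.callback(False) now would raise AlreadyCalledError,
    # hence a new Deferred per heartbeat in the example above.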
Example 5
    def start_kpi_collection(self, device_id):

        kafka_cluster_proxy = get_kafka_proxy()

        def _collect(device_id, prefix):

            try:
                # Step 1: gather metrics from device
                port_metrics = \
                    self.pm_metrics.collect_port_metrics(self.channel)

                # Step 2: prepare the KpiEvent for submission
                # we can time-stamp them here (or could use time derived from the OLT)
                ts = arrow.utcnow().timestamp
                kpi_event = KpiEvent(
                    type=KpiEventType.slice,
                    ts=ts,
                    prefixes={
                        # OLT NNI port
                        prefix + '.nni':
                        MetricValuePairs(metrics=port_metrics['nni']),
                        # OLT PON port
                        prefix + '.pon':
                        MetricValuePairs(metrics=port_metrics['pon'])
                    })

                # Step 3: submit directly to the kafka bus
                if kafka_cluster_proxy:
                    if isinstance(kpi_event, Message):
                        kpi_event = dumps(MessageToDict(kpi_event, True, True))
                    kafka_cluster_proxy.send_message("voltha.kpis", kpi_event)

            except Exception as e:
                log.exception('failed-to-submit-kpis', e=e)

        self.pm_metrics.start_collector(_collect)
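The snippet relies on a few imports that sit outside the excerpt. They are presumably roughly the following; the protobuf helper paths are standard, while the voltha.protos path is an assumption:

    import arrow
    from json import dumps                      # assumed source of dumps()
    from google.protobuf.message import Message
    from google.protobuf.json_format import MessageToDict
    # KpiEvent, KpiEventType and MetricValuePairs come from the VOLTHA
    # protobuf definitions, e.g. (path assumed):
    # from voltha.protos.events_pb2 import KpiEvent, KpiEventType, MetricValuePairs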
Example 6
    def start_kpi_collection(self, device_id):
        kafka_cluster_proxy = get_kafka_proxy()

        @inlineCallbacks
        def _collect(device_id, prefix):
            try:
                self.log.debug("pm-collection-interval")
                # Proxy a message to ponsim_olt. The OLT will then query the ONU
                # for statistics. The reply will
                # arrive proxied back to us in self.receive_message().
                msg = PonSimMetricsRequest(port=self.proxy_address.channel_id)

                # Create a deferred to wait for the result as well as a transid
                wait_for_result = Deferred()
                trnsId = uuid4().hex
                self.inter_adapter_message_deferred_map[self._to_string(
                    trnsId)] = wait_for_result

                # Sends the request via proxy and wait for an ACK
                yield self.adapter_proxy.send_inter_adapter_message(
                    msg=msg,
                    type=InterAdapterMessageType.METRICS_REQUEST,
                    from_adapter=self.adapter.name,
                    to_adapter=self.proxy_address.device_type,
                    to_device_id=self.device_id,
                    proxy_device_id=self.proxy_address.device_id,
                    message_id=trnsId)
                # Wait for the full response from the proxied adapter
                res = yield wait_for_result
                # Remove the transaction from the transaction map
                del self.inter_adapter_message_deferred_map[self._to_string(
                    trnsId)]

                # Message is a reply to an ONU statistics request. Push it out to
                #  Kafka via adapter.submit_kpis().
                if res.header.type == InterAdapterMessageType.METRICS_RESPONSE:
                    msg = InterAdapterResponseBody()
                    res.body.Unpack(msg)
                    self.log.debug('metrics-response-received',
                                   result=msg.status)
                    if self.pm_metrics:
                        self.log.debug('Handling incoming ONU metrics')
                        response = PonSimMetrics()
                        msg.body.Unpack(response)
                        port_metrics = self.pm_metrics.extract_metrics(
                            response)
                        try:
                            ts = arrow.utcnow().timestamp
                            kpi_event = KpiEvent(
                                type=KpiEventType.slice,
                                ts=ts,
                                prefixes={
                                    # ONU UNI port
                                    prefix + '.uni':
                                    MetricValuePairs(
                                        metrics=port_metrics['uni']),
                                    # ONU PON port
                                    prefix + '.pon':
                                    MetricValuePairs(
                                        metrics=port_metrics['pon'])
                                })

                            self.log.debug(
                                'Submitting KPI for incoming ONU metrics')

                            # Step 3: submit directly to the kafka bus
                            if kafka_cluster_proxy:
                                if isinstance(kpi_event, Message):
                                    kpi_event = dumps(
                                        MessageToDict(kpi_event, True, True))
                                kafka_cluster_proxy.send_message(
                                    "voltha.kpis", kpi_event)

                        except Exception as e:
                            log.exception('failed-to-submit-kpis', e=e)
            except Exception as e:
                log.exception('failed-to-collect-metrics', e=e)

        self.pm_metrics.start_collector(_collect)
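The collection routine above only completes once the reply path fires the Deferred parked in inter_adapter_message_deferred_map. Here is a hedged sketch of what that receive side might look like; the handler name and the header field carrying the transaction id are assumptions based on the comments in the example:

    def receive_inter_adapter_message(self, msg):
        # Hypothetical reply handler: resolve the Deferred stored under the
        # message's transaction id so the waiting _collect() can resume.
        trns_id = self._to_string(msg.header.id)   # assumed id field
        d = self.inter_adapter_message_deferred_map.get(trns_id)
        if d is not None and not d.called:
            d.callback(msg)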