def send_notification(self, xos_pod, k8s_pod, status):
    """Publish a pod-details event for *xos_pod* to Kafka.

    Args:
        xos_pod: XOS KubernetesServiceInstance model; supplies name and id.
        k8s_pod: live Kubernetes pod object (or None); supplies labels and
            pod IP when available.
        status: status string to report in the event.
    """
    event = {
        "status": status,
        "name": xos_pod.name,
        "producer": "k8s-sync",
    }

    # Only include the model id once the object has been saved.
    if xos_pod.id:
        event["kubernetesserviceinstance_id"] = xos_pod.id

    if k8s_pod:
        event["labels"] = k8s_pod.metadata.labels
        # A pod only has an IP once it has been scheduled and started.
        if k8s_pod.status.pod_ip:
            event["netinterfaces"] = [
                {"name": "primary", "addresses": [k8s_pod.status.pod_ip]}
            ]

    # repr() fallback keeps serialization from raising on exotic values.
    payload = json.dumps(event, default=lambda o: repr(o))
    XOSKafkaProducer.produce("xos.kubernetes.pod-details", xos_pod.name, payload)
def push_kafka_event(self, deleted=False, pk=None):
    """Transmit a model create/update/delete event to the xos.gui_events topic.

    Args:
        deleted: True when the model instance was deleted.
        pk: primary key to report; defaults to self.pk when falsy.
    """
    model = self.serialize_for_messagebus()

    # Record the full MRO so consumers can match on any ancestor class.
    model["class_names"] = ",".join(
        base.__name__ for base in inspect.getmro(self.__class__)
    )

    pk = pk or self.pk

    json_dict = {
        "pk": pk,
        "changed_fields": self.changed_fields,
        "object": model,
    }

    if deleted:
        json_dict["deleted"] = True
        # The serialized object may lack an id after deletion; restore it.
        json_dict["object"]["id"] = pk

    XOSKafkaProducer.produce(
        "xos.gui_events",
        self.__class__.__name__,
        json.dumps(json_dict, default=json_handler),
    )
def send_alarm(self, switch, port, value):
    """Publish a SWITCH_PORT_LOS alarm for *port* on *switch* to Kafka.

    Args:
        switch: switch model; supplies ofId and name.
        port: port model; supplies portId, kind, and oper_status.
        value: event dict; value["timestamp"] is an ISO-8601 UTC string
            ("%Y-%m-%dT%H:%M:%S.%fZ") giving when the alarm was raised.
    """
    import calendar  # local import: stdlib, needed only for the UTC conversion

    # BUGFIX: the timestamp carries a trailing "Z" (UTC), but time.mktime
    # interprets a struct_time as *local* time, skewing raised_ts by the
    # host's UTC offset. calendar.timegm performs the correct UTC conversion.
    timestamp = calendar.timegm(
        datetime.datetime.strptime(
            value["timestamp"], "%Y-%m-%dT%H:%M:%S.%fZ"
        ).timetuple()
    )

    state = "RAISED" if port.oper_status == "disabled" else "CLEARED"

    context = {
        "portId": port.portId,
        "portKind": port.kind or "unknown",
        "switch.name": switch.name,
    }

    alarm = {
        "category": "SWITCH",
        "reported_ts": time.time(),
        "raised_ts": timestamp,
        "state": state,
        "alarm_type_name": "SWITCH.PORT_LOS",
        "severity": "MAJOR",
        "resource_id": switch.ofId,
        "context": context,
        "type": "COMMUNICATION",
        "id": "xos.fabricservice.%s.SWITCH_PORT_LOS" % switch.ofId,
        "description": "xos.fabricservice.%s - SWITCH PORT LOS Alarm -"
                       " SWITCH_PORT_LOS - %s" % (switch.ofId, state),
    }

    topic = "xos.alarms.fabric-service"
    key = "%s:%s" % (switch.ofId, port.portId)
    # repr() fallback keeps serialization from raising on exotic values.
    value = json.dumps(alarm, default=lambda o: repr(o))
    XOSKafkaProducer.produce(topic, key, value)
def send_alarm(self, olt, value):
    """Publish an OLT_PORT_LOS alarm for *olt* to Kafka.

    Args:
        olt: OLT device model; supplies device_id, dp_id, link_status,
            switch_datapath_id, switch_port, and name.
        value: event dict; value["timestamp"] is an ISO-8601 UTC string
            ("%Y-%m-%dT%H:%M:%S.%fZ") giving when the alarm was raised.
    """
    import calendar  # local import: stdlib, needed only for the UTC conversion

    # BUGFIX: the timestamp carries a trailing "Z" (UTC), but time.mktime
    # interprets a struct_time as *local* time, skewing raised_ts by the
    # host's UTC offset. calendar.timegm performs the correct UTC conversion.
    timestamp = calendar.timegm(
        datetime.datetime.strptime(
            value["timestamp"], "%Y-%m-%dT%H:%M:%S.%fZ"
        ).timetuple()
    )

    state = "RAISED" if olt.link_status == "down" else "CLEARED"

    # Hypothetically, a maximum of 64 subscribers per pon port, 16 pon ports, and 32 characters
    # per subscriber = 32KB of subscriber names in the event.
    subscribers = self.subscriber_olt_closure(olt)
    subscribers = [x.name for x in subscribers]

    alarm = {
        "category": "OLT",
        "reported_ts": time.time(),
        "raised_ts": timestamp,
        "state": state,
        "alarm_type_name": "OLT.PORT_LOS",
        "severity": "MAJOR",
        "resource_id": olt.device_id,
        "logical_device_id": olt.dp_id,
        "context": {
            "affected_subscribers": subscribers,
            "switch_datapath_id": olt.switch_datapath_id,
            "switch_port": olt.switch_port,
            "oltdevice.name": olt.name,
        },
        "type": "COMMUNICATION",
        "id": "xos.voltservice.%s.OLT_PORT_LOS" % olt.device_id,
        "description": "xos.voltservice.%s - OLT PORT LOS Alarm -"
                       " OLT_PORT_LOS - %s" % (olt.device_id, state),
    }

    topic = "xos.alarms.olt-service"
    key = olt.device_id
    # repr() fallback keeps serialization from raising on exotic values.
    value = json.dumps(alarm, default=lambda o: repr(o))
    XOSKafkaProducer.produce(topic, key, value)
def send_alarm(self, switch, port, value):
    """Publish a SWITCH_PORT_LOS alarm for *port* on *switch* to Kafka.

    Args:
        switch: switch model; supplies ofId and name.
        port: port model; supplies portId, kind, and oper_status.
        value: event dict; value["timestamp"] is an ISO-8601 UTC string
            ("%Y-%m-%dT%H:%M:%S.%fZ") giving when the alarm was raised.
    """
    import calendar  # local import: stdlib, needed only for the UTC conversion

    # BUGFIX: the timestamp carries a trailing "Z" (UTC), but time.mktime
    # interprets a struct_time as *local* time, skewing raised_ts by the
    # host's UTC offset. calendar.timegm performs the correct UTC conversion.
    timestamp = calendar.timegm(
        datetime.datetime.strptime(
            value["timestamp"], "%Y-%m-%dT%H:%M:%S.%fZ"
        ).timetuple()
    )

    state = "RAISED" if port.oper_status == "disabled" else "CLEARED"

    context = {
        "portId": port.portId,
        "portKind": port.kind or "unknown",
        "switch.name": switch.name,
    }

    alarm = {
        "category": "SWITCH",
        "reported_ts": time.time(),
        "raised_ts": timestamp,
        "state": state,
        "alarm_type_name": "SWITCH.PORT_LOS",
        "severity": "MAJOR",
        "resource_id": switch.ofId,
        "context": context,
        "type": "COMMUNICATION",
        "id": "xos.fabricservice.%s.SWITCH_PORT_LOS" % switch.ofId,
        "description": "xos.fabricservice.%s - SWITCH PORT LOS Alarm -"
                       " SWITCH_PORT_LOS - %s" % (switch.ofId, state),
    }

    topic = "xos.alarms.fabric-service"
    key = "%s:%s" % (switch.ofId, port.portId)
    # repr() fallback keeps serialization from raising on exotic values.
    value = json.dumps(alarm, default=lambda o: repr(o))
    XOSKafkaProducer.produce(topic, key, value)
import argparse import prometheus_client # FIXME: should grpc_server initialize the Config? from grpc_server import XOSGrpcServer from xosconfig import Config from xoskafka import XOSKafkaProducer from multistructlog import create_logger log = create_logger(Config().get("logging")) # create an single kafka producer connection for the core XOSKafkaProducer.init() def parse_args(): parser = argparse.ArgumentParser() parser.add_argument( "--model_status", dest="model_status", type=int, default=0, help="status of model prep", ) parser.add_argument( "--model_output", dest="model_output", type=file,