def exabgp_msg(bgp_message):
    """Normalize one raw exaBGP message and publish it to the update exchange.

    NOTE(review): defined as a closure — `self`, `redis`, `validator`,
    `connection`, `log` and the helpers come from the enclosing scope.
    """
    # Liveness marker: record that an exaBGP update was seen recently,
    # expiring after the configured monitor timeout (env-overridable).
    redis.set(
        "exabgp_seen_bgp_update",
        "1",
        ex=int(
            os.getenv(
                "MON_TIMEOUT_LAST_BGP_UPDATE",
                DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
            )
        ),
    )
    # Translate the raw message into the internal update format.
    msg = {
        "type": bgp_message["type"],
        "communities": bgp_message.get("communities", []),
        "timestamp": float(bgp_message["timestamp"]),
        "path": bgp_message.get("path", []),
        "service": "exabgp|{}".format(self.host),
        "prefix": bgp_message["prefix"],
        "peer_asn": int(bgp_message["peer_asn"]),
    }
    if validator.validate(msg):
        with Producer(connection) as producer:
            # one incoming update may normalize to several outgoing messages
            msgs = normalize_msg_path(msg)
            for msg in msgs:
                key_generator(msg)
                log.debug(msg)
                producer.publish(
                    msg,
                    exchange=self.exchange,
                    routing_key="update",
                    serializer="json",
                )
    else:
        log.warning("Invalid format message: {}".format(msg))
def on_ris_msg(msg):
    """Callback: normalize an incoming RIPE RIS message and publish it.

    `connection`, `exchange` and the helper functions are resolved from
    the enclosing scope; any failure is logged and swallowed.
    """
    try:
        ris_producer = Producer(connection)
        # normalize_ripe_ris mutates msg in place
        normalize_ripe_ris(msg)
        if not mformat_validator(msg):
            log.warning('Invalid format message: {}'.format(msg))
            return
        for norm_msg in normalize_msg_path(msg):
            key_generator(norm_msg)
            log.debug(norm_msg)
            ris_producer.publish(
                norm_msg,
                exchange=exchange,
                routing_key='update',
                serializer='json',
            )
    except Exception:
        log.exception('exception')
def exabgp_msg(bgp_message):
    """Translate one raw exaBGP message into the internal format and publish it.

    Closure: `self`, `connection` and the helpers come from the enclosing scope.
    """
    # normalized update in the shape the validator expects
    update = {
        'type': bgp_message['type'],
        'communities': bgp_message.get('communities', []),
        'timestamp': float(bgp_message['timestamp']),
        'path': bgp_message.get('path', []),
        'service': 'exabgp|{}'.format(self.host),
        'prefix': bgp_message['prefix'],
        'peer_asn': int(bgp_message['peer_asn'])
    }
    if not mformat_validator(update):
        log.warning('Invalid format message: {}'.format(update))
        return
    with Producer(connection) as producer:
        for out_msg in normalize_msg_path(update):
            key_generator(out_msg)
            log.debug(out_msg)
            producer.publish(
                out_msg,
                exchange=self.exchange,
                routing_key='update',
                serializer='json',
            )
def run_bgpstream(prefixes_file=None, projects=None, start=0, end=0):
    """
    Retrieve all records related to a list of prefixes
    https://bgpstream.caida.org/docs/api/pybgpstream/_pybgpstream.html
    :param prefixes_file: <str> input prefix json
    :param projects: <list><str> bgpstream projects whose collectors to use
    :param start: <int> start timestamp in UNIX epochs
    :param end: <int> end timestamp in UNIX epochs (if 0 --> "live mode")
    :return: -
    """
    # FIX: default was a shared mutable list ([]); use a None sentinel
    if projects is None:
        projects = []
    prefixes = load_json(prefixes_file)
    assert prefixes is not None
    # create a new bgpstream instance and a reusable bgprecord instance
    stream = _pybgpstream.BGPStream()
    # consider collectors from given projects
    for project in projects:
        stream.add_filter("project", project)
    # filter prefixes
    for prefix in prefixes:
        stream.add_filter("prefix", prefix)
    # filter record type
    stream.add_filter("record-type", "updates")
    # filter based on timing (if end=0 --> live mode)
    stream.add_interval_filter(start, end)
    # set live mode
    stream.set_live_mode()
    # start the stream
    stream.start()
    with Connection(RABBITMQ_URI) as connection:
        exchange = Exchange(
            "bgp-update", channel=connection, type="direct", durable=False
        )
        exchange.declare()
        producer = Producer(connection)
        validator = mformat_validator()
        while True:
            # get next record
            try:
                rec = stream.get_next_record()
            except BaseException:
                continue
            if (rec.status != "valid") or (rec.type != "update"):
                continue
            # get next element
            try:
                elem = rec.get_next_elem()
            except BaseException:
                continue
            while elem:
                if elem.type in {"A", "W"}:
                    this_prefix = str(elem.fields["prefix"])
                    service = "bgpstream|{}|{}".format(
                        str(rec.project), str(rec.collector)
                    )
                    type_ = elem.type
                    if type_ == "A":
                        # announcement: AS path and communities are meaningful
                        as_path = elem.fields["as-path"].split(" ")
                        communities = [
                            {
                                "asn": int(comm.split(":")[0]),
                                "value": int(comm.split(":")[1]),
                            }
                            for comm in elem.fields["communities"]
                        ]
                    else:
                        # withdrawal: no path/communities
                        as_path = []
                        communities = []
                    timestamp = float(rec.time)
                    peer_asn = elem.peer_asn
                    for prefix in prefixes:
                        base_ip, mask_length = this_prefix.split("/")
                        our_prefix = IPNetwork(prefix)
                        # publish only exact matches or sub-prefixes of a
                        # configured prefix
                        if (
                            IPAddress(base_ip) in our_prefix
                            and int(mask_length) >= our_prefix.prefixlen
                        ):
                            msg = {
                                "type": type_,
                                "timestamp": timestamp,
                                "path": as_path,
                                "service": service,
                                "communities": communities,
                                "prefix": this_prefix,
                                "peer_asn": peer_asn,
                            }
                            if validator.validate(msg):
                                msgs = normalize_msg_path(msg)
                                for msg in msgs:
                                    key_generator(msg)
                                    log.debug(msg)
                                    producer.publish(
                                        msg,
                                        exchange=exchange,
                                        routing_key="update",
                                        serializer="json",
                                    )
                            else:
                                log.warning("Invalid format message: {}".format(msg))
                try:
                    elem = rec.get_next_elem()
                except BaseException:
                    # FIX: was `continue`, which re-entered this loop with the
                    # same stale elem and re-published it indefinitely; abandon
                    # the record and fetch a fresh one instead
                    break
def run_bgpstream(prefixes=None, projects=None, start=0, end=0):
    """
    Retrieve all records related to a list of prefixes
    https://bgpstream.caida.org/docs/api/pybgpstream/_pybgpstream.html
    :param prefixes: <list><str> input prefixes
    :param projects: <list><str> bgpstream projects whose collectors to use
    :param start: <int> start timestamp in UNIX epochs
    :param end: <int> end timestamp in UNIX epochs (if 0 --> "live mode")
    :return: -
    """
    # FIX: defaults were shared mutable lists ([]); use None sentinels
    if prefixes is None:
        prefixes = []
    if projects is None:
        projects = []
    # create a new bgpstream instance and a reusable bgprecord instance
    stream = _pybgpstream.BGPStream()
    # consider collectors from given projects
    for project in projects:
        stream.add_filter('project', project)
    # filter prefixes
    for prefix in prefixes:
        stream.add_filter('prefix', prefix)
    # filter record type
    stream.add_filter('record-type', 'updates')
    # filter based on timing (if end=0 --> live mode)
    stream.add_interval_filter(start, end)
    # set live mode
    stream.set_live_mode()
    # start the stream
    stream.start()
    with Connection(RABBITMQ_HOST) as connection:
        exchange = Exchange(
            'bgp-update', channel=connection, type='direct', durable=False)
        exchange.declare()
        producer = Producer(connection)
        while True:
            # get next record
            try:
                rec = stream.get_next_record()
            except BaseException:
                continue
            if (rec.status != "valid") or (rec.type != "update"):
                continue
            # get next element
            try:
                elem = rec.get_next_elem()
            except BaseException:
                continue
            while elem:
                if elem.type in ["A", "W"]:
                    this_prefix = str(elem.fields['prefix'])
                    service = "bgpstream|{}|{}".format(
                        str(rec.project), str(rec.collector))
                    type_ = elem.type
                    if type_ == "A":
                        # announcement: AS path and communities are meaningful
                        as_path = elem.fields['as-path'].split(' ')
                        communities = [{'asn': int(comm.split(':')[0]),
                                        'value': int(comm.split(':')[1])}
                                       for comm in elem.fields['communities']]
                    else:
                        # withdrawal: no path/communities
                        as_path = []
                        communities = []
                    timestamp = float(rec.time)
                    peer_asn = elem.peer_asn
                    for prefix in prefixes:
                        base_ip, mask_length = this_prefix.split('/')
                        our_prefix = IPNetwork(prefix)
                        # publish only exact matches or sub-prefixes of a
                        # configured prefix
                        if IPAddress(base_ip) in our_prefix and int(
                                mask_length) >= our_prefix.prefixlen:
                            msg = {
                                'type': type_,
                                'timestamp': timestamp,
                                'path': as_path,
                                'service': service,
                                'communities': communities,
                                'prefix': this_prefix,
                                'peer_asn': peer_asn
                            }
                            if mformat_validator(msg):
                                msgs = normalize_msg_path(msg)
                                for msg in msgs:
                                    key_generator(msg)
                                    log.debug(msg)
                                    producer.publish(
                                        msg,
                                        exchange=exchange,
                                        routing_key='update',
                                        serializer='json'
                                    )
                            else:
                                log.warning('Invalid format message: {}'.format(msg))
                try:
                    elem = rec.get_next_elem()
                except BaseException:
                    # FIX: was `continue`, which re-entered this loop with the
                    # same stale elem and re-published it indefinitely; abandon
                    # the record and fetch a fresh one instead
                    break
def parse_ripe_ris(connection, prefixes_file, hosts):
    """Stream BGP updates from the RIPE RIS live feed and publish matches.

    :param connection: kombu connection used for the exchange and producer
    :param prefixes_file: <str> input prefix json
    :param hosts: RIS route-collector hosts to accept (falsy --> accept all)
    Runs forever; the HTTP stream is re-opened after any failure.
    """
    exchange = Exchange("bgp-update", channel=connection, type="direct", durable=False)
    exchange.declare()
    prefixes = load_json(prefixes_file)
    assert prefixes is not None
    # radix tree used by normalize_ripe_ris for prefix matching
    prefix_tree = radix.Radix()
    for prefix in prefixes:
        prefix_tree.add(prefix)
    # identifies this client towards the RIS live service
    ris_suffix = os.getenv("RIS_ID", "my_as")
    validator = mformat_validator()
    with Producer(connection) as producer:
        while True:
            try:
                events = requests.get(
                    "https://ris-live.ripe.net/v1/stream/?format=json&client=artemis-{}".format(
                        ris_suffix
                    ),
                    stream=True,
                    timeout=10,
                )
                # http://docs.python-requests.org/en/latest/user/advanced/#streaming-requests
                iterator = events.iter_lines()
                # NOTE(review): intentionally discards the first stream line —
                # presumably a handshake/banner; confirm against the RIS API
                next(iterator)
                for data in iterator:
                    try:
                        parsed = json.loads(data)
                        msg = parsed["data"]
                        if "type" in parsed and parsed["type"] == "ris_error":
                            log.error(msg)
                        # also check if ris host is in the configuration
                        elif (
                            "type" in msg
                            and msg["type"] == "UPDATE"
                            and (not hosts or msg["host"] in hosts)
                        ):
                            norm_ris_msgs = normalize_ripe_ris(msg, prefix_tree)
                            for norm_ris_msg in norm_ris_msgs:
                                # liveness marker: a RIS update was seen recently
                                redis.set(
                                    "ris_seen_bgp_update",
                                    "1",
                                    ex=int(
                                        os.getenv(
                                            "MON_TIMEOUT_LAST_BGP_UPDATE",
                                            DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
                                        )
                                    ),
                                )
                                if validator.validate(norm_ris_msg):
                                    norm_path_msgs = normalize_msg_path(norm_ris_msg)
                                    for norm_path_msg in norm_path_msgs:
                                        key_generator(norm_path_msg)
                                        log.debug(norm_path_msg)
                                        producer.publish(
                                            norm_path_msg,
                                            exchange=exchange,
                                            routing_key="update",
                                            serializer="json",
                                        )
                                else:
                                    log.warning(
                                        "Invalid format message: {}".format(msg)
                                    )
                    except json.decoder.JSONDecodeError:
                        log.exception("Message {}".format(data))
                    except Exception:
                        log.exception("exception")
                log.warning("Iterator ran out of data; the connection will be retried")
            except Exception:
                log.exception("server closed connection")
                # back off briefly before reconnecting
                time.sleep(5)
def parse_bgpstreamhist_csvs(prefixes_file=None, input_dir=None):
    """Replay historical BGP updates from pipe-delimited CSVs and publish matches.

    :param prefixes_file: <str> input prefix json
    :param input_dir: <str> directory containing *.csv files to replay
    Rows that fail to parse are logged and skipped.
    """
    prefixes = load_json(prefixes_file)
    assert prefixes is not None
    with Connection(RABBITMQ_URI) as connection:
        exchange = Exchange("bgp-update", channel=connection, type="direct", durable=False)
        exchange.declare()
        producer = Producer(connection)
        validator = mformat_validator()
        for csv_file in glob.glob("{}/*.csv".format(input_dir)):
            try:
                with open(csv_file, "r") as f:
                    csv_reader = csv.reader(f, delimiter="|")
                    for row in csv_reader:
                        try:
                            # exactly 9 fields expected; '#' lines are comments
                            if len(row) != 9:
                                continue
                            if row[0].startswith("#"):
                                continue
                            # example row: 139.91.0.0/16|8522|1403|1403 6461 2603 21320
                            # 5408
                            # 8522|routeviews|route-views2|A|"[{""asn"":1403,""value"":6461}]"|1517446677
                            this_prefix = row[0]
                            if row[6] == "A":
                                # announcement: path and communities are meaningful
                                as_path = row[3].split(" ")
                                communities = json.loads(row[7])
                            else:
                                # withdrawal: no path/communities
                                as_path = []
                                communities = []
                            service = "historical|{}|{}".format(row[4], row[5])
                            type_ = row[6]
                            timestamp = float(row[8])
                            peer_asn = int(row[2])
                            for prefix in prefixes:
                                try:
                                    base_ip, mask_length = this_prefix.split(
                                        "/")
                                    our_prefix = IPNetwork(prefix)
                                    # publish only exact matches or sub-prefixes
                                    # of a configured prefix
                                    if (IPAddress(base_ip) in our_prefix
                                            and int(mask_length) >= our_prefix.prefixlen):
                                        msg = {
                                            "type": type_,
                                            "timestamp": timestamp,
                                            "path": as_path,
                                            "service": service,
                                            "communities": communities,
                                            "prefix": this_prefix,
                                            "peer_asn": peer_asn,
                                        }
                                        if validator.validate(msg):
                                            msgs = normalize_msg_path(msg)
                                            for msg in msgs:
                                                key_generator(msg)
                                                log.debug(msg)
                                                producer.publish(
                                                    msg,
                                                    exchange=exchange,
                                                    routing_key="update",
                                                    serializer="json",
                                                )
                                                # throttle replay so downstream
                                                # consumers keep up
                                                time.sleep(0.1)
                                        else:
                                            log.warning(
                                                "Invalid format message: {}".
                                                format(msg))
                                except Exception:
                                    log.exception("prefix")
                        except Exception:
                            log.exception("row")
            except Exception:
                log.exception("exception")
def parse_bgpstreamhist_csvs(self):
    """Replay historical updates from self.input_dir CSVs and publish matches.

    In autoconf mode every matching update is first sent as an RPC to the
    configuration service over "conf-autoconf-update-queue" and publishing
    blocks until the go-ahead reply arrives.
    """
    # add /0 if autoconf
    if self.autoconf:
        self.prefixes.append("0.0.0.0/0")
        self.prefixes.append("::/0")
    with Connection(RABBITMQ_URI) as connection:
        self.update_exchange = Exchange("bgp-update", channel=connection, type="direct", durable=False)
        self.update_exchange.declare()
        producer = Producer(connection)
        validator = mformat_validator()
        for csv_file in glob.glob("{}/*.csv".format(self.input_dir)):
            try:
                with open(csv_file, "r") as f:
                    csv_reader = csv.reader(f, delimiter="|")
                    for row in csv_reader:
                        try:
                            # exactly 9 fields expected; '#' lines are comments
                            if len(row) != 9:
                                continue
                            if row[0].startswith("#"):
                                continue
                            # example row: 139.91.0.0/16|8522|1403|1403 6461 2603 21320
                            # 5408
                            # 8522|routeviews|route-views2|A|"[{""asn"":1403,""value"":6461}]"|1517446677
                            this_prefix = row[0]
                            if row[6] == "A":
                                # announcement: path and communities are meaningful
                                as_path = row[3].split(" ")
                                communities = json.loads(row[7])
                            else:
                                # withdrawal: no path/communities
                                as_path = []
                                communities = []
                            service = "historical|{}|{}".format(
                                row[4], row[5])
                            type_ = row[6]
                            timestamp = float(row[8])
                            peer_asn = int(row[2])
                            for prefix in self.prefixes:
                                try:
                                    base_ip, mask_length = this_prefix.split(
                                        "/")
                                    our_prefix = IPNetwork(prefix)
                                    # act only on exact matches or sub-prefixes
                                    # of a configured prefix
                                    if (IPAddress(base_ip) in our_prefix
                                            and int(mask_length) >= our_prefix.prefixlen):
                                        msg = {
                                            "type": type_,
                                            "timestamp": timestamp,
                                            "path": as_path,
                                            "service": service,
                                            "communities": communities,
                                            "prefix": this_prefix,
                                            "peer_asn": peer_asn,
                                        }
                                        if validator.validate(msg):
                                            msgs = normalize_msg_path(msg)
                                            for msg in msgs:
                                                key_generator(msg)
                                                log.debug(msg)
                                                if self.autoconf:
                                                    # only self-originations matched
                                                    # via the catch-all /0 prefixes
                                                    # are eligible for autoconf
                                                    if str(our_prefix) in [
                                                            "0.0.0.0/0",
                                                            "::/0",
                                                    ]:
                                                        if msg["type"] == "A":
                                                            as_path = clean_as_path(
                                                                msg["path"]
                                                            )
                                                            if len(as_path) > 1:
                                                                # ignore, since this is not a self-network origination, but sth transit
                                                                break
                                                        elif msg["type"] == "W":
                                                            # ignore irrelevant withdrawals
                                                            break
                                                    # RPC to the configuration
                                                    # service: publish and block
                                                    # until the go-ahead reply
                                                    # flips self.autoconf_goahead
                                                    self.autoconf_goahead = False
                                                    correlation_id = uuid()
                                                    callback_queue = Queue(
                                                        uuid(),
                                                        durable=False,
                                                        auto_delete=True,
                                                        max_priority=4,
                                                        consumer_arguments={
                                                            "x-priority": 4
                                                        },
                                                    )
                                                    producer.publish(
                                                        msg,
                                                        exchange="",
                                                        routing_key="conf-autoconf-update-queue",
                                                        reply_to=callback_queue.name,
                                                        correlation_id=correlation_id,
                                                        retry=True,
                                                        declare=[
                                                            Queue(
                                                                "conf-autoconf-update-queue",
                                                                durable=False,
                                                                max_priority=4,
                                                                consumer_arguments={
                                                                    "x-priority": 4
                                                                },
                                                            ),
                                                            callback_queue,
                                                        ],
                                                        priority=4,
                                                        serializer="ujson",
                                                    )
                                                    with Consumer(
                                                            connection,
                                                            on_message=self.handle_autoconf_update_goahead_reply,
                                                            queues=[callback_queue],
                                                            accept=["ujson"],
                                                    ):
                                                        while (not self.autoconf_goahead):
                                                            connection.drain_events()
                                                producer.publish(
                                                    msg,
                                                    exchange=self.update_exchange,
                                                    routing_key="update",
                                                    serializer="ujson",
                                                )
                                                # throttle replay so downstream
                                                # consumers keep up
                                                time.sleep(0.1)
                                        else:
                                            log.warning(
                                                "Invalid format message: {}"
                                                .format(msg))
                                        # first matching configured prefix wins
                                        break
                                except Exception:
                                    log.exception("prefix")
                        except Exception:
                            log.exception("row")
            except Exception:
                log.exception("exception")
def exabgp_msg(bgp_message):
    """Normalize one raw exaBGP message, gate it through autoconf if enabled,
    and publish it to the update exchange.

    NOTE(review): defined as a closure — `self`, `redis`, `validator`,
    `connection`, `log` and the helpers come from the enclosing scope.
    """
    # Liveness marker: record that an exaBGP update was seen recently.
    redis.set(
        "exabgp_seen_bgp_update",
        "1",
        ex=int(
            os.getenv(
                "MON_TIMEOUT_LAST_BGP_UPDATE",
                DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
            )),
    )
    # Translate the raw message into the internal update format.
    msg = {
        "type": bgp_message["type"],
        "communities": bgp_message.get("communities", []),
        "timestamp": float(bgp_message["timestamp"]),
        "path": bgp_message.get("path", []),
        "service": "exabgp|{}".format(self.host),
        "prefix": bgp_message["prefix"],
        "peer_asn": int(bgp_message["peer_asn"]),
    }
    for prefix in self.prefixes:
        try:
            base_ip, mask_length = bgp_message["prefix"].split(
                "/")
            our_prefix = IPNetwork(prefix)
            # act only on exact matches or sub-prefixes of a configured prefix
            if (IPAddress(base_ip) in our_prefix
                    and int(mask_length) >= our_prefix.prefixlen):
                if validator.validate(msg):
                    with Producer(connection) as producer:
                        msgs = normalize_msg_path(msg)
                        for msg in msgs:
                            key_generator(msg)
                            log.debug(msg)
                            if self.autoconf:
                                # only self-originations matched via the
                                # catch-all /0 prefixes are eligible
                                if str(our_prefix) in [
                                        "0.0.0.0/0",
                                        "::/0",
                                ]:
                                    if msg["type"] == "A":
                                        as_path = clean_as_path(
                                            msg["path"])
                                        if len(as_path) > 1:
                                            # ignore, since this is not a self-network origination, but sth transit
                                            break
                                    elif msg["type"] == "W":
                                        # ignore irrelevant withdrawals
                                        break
                                # RPC to the configuration service: publish and
                                # block until the go-ahead reply flips
                                # self.autoconf_goahead
                                self.autoconf_goahead = False
                                correlation_id = uuid()
                                callback_queue = Queue(
                                    uuid(),
                                    durable=False,
                                    auto_delete=True,
                                    max_priority=4,
                                    consumer_arguments={
                                        "x-priority": 4
                                    },
                                )
                                producer.publish(
                                    msg,
                                    exchange="",
                                    routing_key="conf-autoconf-update-queue",
                                    reply_to=callback_queue.name,
                                    correlation_id=correlation_id,
                                    retry=True,
                                    declare=[
                                        Queue(
                                            "conf-autoconf-update-queue",
                                            durable=False,
                                            max_priority=4,
                                            consumer_arguments={
                                                "x-priority": 4
                                            },
                                        ),
                                        callback_queue,
                                    ],
                                    priority=4,
                                    serializer="ujson",
                                )
                                with Consumer(
                                        connection,
                                        on_message=self.handle_autoconf_update_goahead_reply,
                                        queues=[callback_queue],
                                        accept=["ujson"],
                                ):
                                    while not self.autoconf_goahead:
                                        connection.drain_events()
                            producer.publish(
                                msg,
                                exchange=self.update_exchange,
                                routing_key="update",
                                serializer="ujson",
                            )
                else:
                    log.warning(
                        "Invalid format message: {}".format(
                            msg))
                # first matching configured prefix wins
                break
        except Exception:
            log.exception("exception")
def run_bgpstream_beta_bmp(prefixes=None):
    """
    Retrieve all elements related to a list of prefixes
    https://bgpstream.caida.org/docs/api/pybgpstream/_pybgpstream.html
    :param prefixes: <list><str> input prefixes
    :return: -
    """
    # FIX: default was a shared mutable list ([]); use a None sentinel
    if prefixes is None:
        prefixes = []
    # create a new bgpstream instance
    stream = _pybgpstream.BGPStream()
    # set BMP data interface
    stream.set_data_interface('beta-bmp-stream')
    # filter prefixes
    for prefix in prefixes:
        stream.add_filter('prefix', prefix)
    # filter record type
    stream.add_filter('record-type', 'updates')
    # set live mode
    stream.set_live_mode()
    # start the stream
    stream.start()
    with Connection(RABBITMQ_HOST) as connection:
        exchange = Exchange(
            'bgp-update', channel=connection, type='direct', durable=False)
        exchange.declare()
        producer = Producer(connection)
        while True:
            # get next record
            try:
                rec = stream.get_next_record()
            except BaseException:
                continue
            if (rec.status != 'valid') or (rec.type != 'update'):
                continue
            # get next element
            try:
                elem = rec.get_next_elem()
            except BaseException:
                continue
            while elem:
                if elem.type in ['A', 'W']:
                    this_prefix = str(elem.fields['prefix'])
                    service = 'betabmp|{}|{}'.format(
                        str(rec.project), str(rec.collector))
                    type_ = elem.type
                    if type_ == 'A':
                        # announcement: AS path and communities are meaningful
                        as_path = elem.fields['as-path'].split(' ')
                        communities = [{'asn': int(comm.split(':')[0]),
                                        'value': int(comm.split(':')[1])}
                                       for comm in elem.fields['communities']]
                    else:
                        # withdrawal: no path/communities
                        as_path = []
                        communities = []
                    timestamp = float(rec.time)
                    peer_asn = elem.peer_asn
                    for prefix in prefixes:
                        base_ip, mask_length = this_prefix.split('/')
                        our_prefix = IPNetwork(prefix)
                        # publish only exact matches or sub-prefixes of a
                        # configured prefix
                        if IPAddress(base_ip) in our_prefix and int(
                                mask_length) >= our_prefix.prefixlen:
                            msg = {
                                'type': type_,
                                'timestamp': timestamp,
                                'path': as_path,
                                'service': service,
                                'communities': communities,
                                'prefix': this_prefix,
                                'peer_asn': peer_asn
                            }
                            if mformat_validator(msg):
                                msgs = normalize_msg_path(msg)
                                for msg in msgs:
                                    key_generator(msg)
                                    log.debug(msg)
                                    producer.publish(
                                        msg,
                                        exchange=exchange,
                                        routing_key='update',
                                        serializer='json'
                                    )
                            else:
                                log.warning('Invalid format message: {}'.format(msg))
                try:
                    elem = rec.get_next_elem()
                except BaseException:
                    # FIX: was `continue`, which re-entered this loop with the
                    # same stale elem and re-published it indefinitely; abandon
                    # the record and fetch a fresh one instead
                    break
def run_bgpstream(
        prefixes_file=None,
        kafka_host=None,
        kafka_port=None,
        kafka_topic="openbmp.bmp_raw",
        start=0,
        end=0,
):
    """
    Retrieve all records related to a list of prefixes
    https://bgpstream.caida.org/docs/api/pybgpstream/_pybgpstream.html
    :param prefixes_file: <str> input prefix json
    :param kafka_host: <str> kafka host
    :param kafka_port: <int> kafka_port
    :param kafka_topic: <str> kafka topic
    :param start: <int> start timestamp in UNIX epochs
    :param end: <int> end timestamp in UNIX epochs (if 0 --> "live mode")
    :return: -
    """
    prefixes = load_json(prefixes_file)
    assert prefixes is not None
    # create a new bgpstream instance and a reusable bgprecord instance
    stream = _pybgpstream.BGPStream()
    # set kafka data interface
    stream.set_data_interface("kafka")
    # set host connection details
    stream.set_data_interface_option("kafka", "brokers",
                                     "{}:{}".format(kafka_host, kafka_port))
    # set topic
    stream.set_data_interface_option("kafka", "topic", kafka_topic)
    # filter prefixes
    for prefix in prefixes:
        stream.add_filter("prefix", prefix)
    # filter record type
    stream.add_filter("record-type", "updates")
    # filter based on timing (if end=0 --> live mode)
    stream.add_interval_filter(start, end)
    # set live mode
    stream.set_live_mode()
    # start the stream
    stream.start()
    with Connection(RABBITMQ_URI) as connection:
        exchange = Exchange("bgp-update", channel=connection, type="direct", durable=False)
        exchange.declare()
        producer = Producer(connection)
        validator = mformat_validator()
        while True:
            # get next record
            try:
                rec = stream.get_next_record()
            except BaseException:
                continue
            if (rec.status != "valid") or (rec.type != "update"):
                continue
            # get next element
            try:
                elem = rec.get_next_elem()
            except BaseException:
                continue
            while elem:
                if elem.type in {"A", "W"}:
                    # liveness marker: a kafka-fed update was seen recently
                    redis.set(
                        "bgpstreamkafka_seen_bgp_update",
                        "1",
                        ex=int(
                            os.getenv(
                                "MON_TIMEOUT_LAST_BGP_UPDATE",
                                DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
                            )),
                    )
                    this_prefix = str(elem.fields["prefix"])
                    service = "bgpstreamkafka|{}".format(str(rec.collector))
                    type_ = elem.type
                    if type_ == "A":
                        # announcement: AS path and communities are meaningful
                        as_path = elem.fields["as-path"].split(" ")
                        communities = [{
                            "asn": int(comm.split(":")[0]),
                            "value": int(comm.split(":")[1]),
                        } for comm in elem.fields["communities"]]
                    else:
                        # withdrawal: no path/communities
                        as_path = []
                        communities = []
                    timestamp = float(rec.time)
                    if timestamp == 0:
                        # the kafka feed may deliver zero timestamps;
                        # substitute the local arrival time
                        timestamp = time.time()
                        log.debug("fixed timestamp: {}".format(timestamp))
                    peer_asn = elem.peer_asn
                    for prefix in prefixes:
                        base_ip, mask_length = this_prefix.split("/")
                        our_prefix = IPNetwork(prefix)
                        # publish only exact matches or sub-prefixes of a
                        # configured prefix
                        if (IPAddress(base_ip) in our_prefix
                                and int(mask_length) >= our_prefix.prefixlen):
                            msg = {
                                "type": type_,
                                "timestamp": timestamp,
                                "path": as_path,
                                "service": service,
                                "communities": communities,
                                "prefix": this_prefix,
                                "peer_asn": peer_asn,
                            }
                            try:
                                if validator.validate(msg):
                                    msgs = normalize_msg_path(msg)
                                    for msg in msgs:
                                        key_generator(msg)
                                        log.debug(msg)
                                        producer.publish(
                                            msg,
                                            exchange=exchange,
                                            routing_key="update",
                                            serializer="ujson",
                                        )
                                else:
                                    log.warning(
                                        "Invalid format message: {}".format(
                                            msg))
                            except BaseException:
                                log.exception(
                                    "Error when normalizing BGP message: {}".
                                    format(msg))
                            # first matching configured prefix wins
                            break
                try:
                    elem = rec.get_next_elem()
                except BaseException:
                    # FIX: was `continue`, which re-entered this loop with the
                    # same stale elem and re-published it indefinitely; abandon
                    # the record and fetch a fresh one instead
                    break
def parse_bgpstreamhist_csvs(prefixes=None, input_dir=None):
    """Replay historical BGP updates from pipe-delimited CSVs and publish matches.

    :param prefixes: <list><str> input prefixes
    :param input_dir: <str> directory containing *.csv files to replay
    Rows that fail to parse are logged and skipped.
    """
    # FIX: default was a shared mutable list ([]); use a None sentinel
    if prefixes is None:
        prefixes = []
    with Connection(RABBITMQ_HOST) as connection:
        exchange = Exchange('bgp-update', channel=connection, type='direct', durable=False)
        exchange.declare()
        producer = Producer(connection)
        for csv_file in glob.glob("{}/*.csv".format(input_dir)):
            try:
                with open(csv_file, 'r') as f:
                    csv_reader = csv.reader(f, delimiter="|")
                    for row in csv_reader:
                        try:
                            # exactly 9 fields expected; '#' lines are comments
                            if len(row) != 9:
                                continue
                            if row[0].startswith('#'):
                                continue
                            # example row: 139.91.0.0/16|8522|1403|1403 6461 2603 21320
                            # 5408
                            # 8522|routeviews|route-views2|A|"[{""asn"":1403,""value"":6461}]"|1517446677
                            this_prefix = row[0]
                            if row[6] == 'A':
                                # announcement: path and communities are meaningful
                                as_path = row[3].split(' ')
                                communities = json.loads(row[7])
                            else:
                                # withdrawal: no path/communities
                                as_path = []
                                communities = []
                            service = "historical|{}|{}".format(row[4], row[5])
                            type_ = row[6]
                            timestamp = float(row[8])
                            peer_asn = int(row[2])
                            for prefix in prefixes:
                                try:
                                    base_ip, mask_length = this_prefix.split(
                                        '/')
                                    our_prefix = IPNetwork(prefix)
                                    # publish only exact matches or sub-prefixes
                                    # of a configured prefix
                                    if IPAddress(
                                            base_ip) in our_prefix and int(
                                            mask_length
                                    ) >= our_prefix.prefixlen:
                                        msg = {
                                            'type': type_,
                                            'timestamp': timestamp,
                                            'path': as_path,
                                            'service': service,
                                            'communities': communities,
                                            'prefix': this_prefix,
                                            'peer_asn': peer_asn
                                        }
                                        if mformat_validator(msg):
                                            msgs = normalize_msg_path(msg)
                                            for msg in msgs:
                                                key_generator(msg)
                                                log.debug(msg)
                                                producer.publish(
                                                    msg,
                                                    exchange=exchange,
                                                    routing_key='update',
                                                    serializer='json')
                                        else:
                                            log.warning(
                                                'Invalid format message: {}'.
                                                format(msg))
                                except Exception:
                                    log.exception('prefix')
                        except Exception:
                            log.exception('row')
            except Exception:
                log.exception('exception')
def run_bgpstream_beta_bmp(prefixes_file=None):
    """
    Retrieve all elements related to a list of prefixes
    https://bgpstream.caida.org/docs/api/pybgpstream/_pybgpstream.html
    :param prefixes_file: <str> input prefix json
    :return: -
    """
    prefixes = load_json(prefixes_file)
    assert prefixes is not None
    # create a new bgpstream instance
    stream = _pybgpstream.BGPStream()
    # set BMP data interface
    stream.set_data_interface("beta-bmp-stream")
    # filter prefixes
    for prefix in prefixes:
        stream.add_filter("prefix", prefix)
    # filter record type
    stream.add_filter("record-type", "updates")
    # set live mode
    stream.set_live_mode()
    # start the stream
    stream.start()
    with Connection(RABBITMQ_URI) as connection:
        exchange = Exchange("bgp-update", channel=connection, type="direct", durable=False)
        exchange.declare()
        producer = Producer(connection)
        validator = mformat_validator()
        while True:
            # get next record
            try:
                rec = stream.get_next_record()
            except BaseException:
                continue
            if (rec.status != "valid") or (rec.type != "update"):
                continue
            # get next element
            try:
                elem = rec.get_next_elem()
            except BaseException:
                continue
            while elem:
                if elem.type in {"A", "W"}:
                    # liveness marker: a beta-BMP update was seen recently
                    redis.set(
                        "betabmp_seen_bgp_update",
                        "1",
                        ex=int(
                            os.getenv(
                                "MON_TIMEOUT_LAST_BGP_UPDATE",
                                DEFAULT_MON_TIMEOUT_LAST_BGP_UPDATE,
                            )),
                    )
                    this_prefix = str(elem.fields["prefix"])
                    service = "betabmp|{}|{}".format(str(rec.project), str(rec.collector))
                    type_ = elem.type
                    if type_ == "A":
                        # announcement: AS path and communities are meaningful
                        as_path = elem.fields["as-path"].split(" ")
                        communities = [{
                            "asn": int(comm.split(":")[0]),
                            "value": int(comm.split(":")[1]),
                        } for comm in elem.fields["communities"]]
                    else:
                        # withdrawal: no path/communities
                        as_path = []
                        communities = []
                    timestamp = float(rec.time)
                    peer_asn = elem.peer_asn
                    for prefix in prefixes:
                        base_ip, mask_length = this_prefix.split("/")
                        our_prefix = IPNetwork(prefix)
                        # publish only exact matches or sub-prefixes of a
                        # configured prefix
                        if (IPAddress(base_ip) in our_prefix
                                and int(mask_length) >= our_prefix.prefixlen):
                            msg = {
                                "type": type_,
                                "timestamp": timestamp,
                                "path": as_path,
                                "service": service,
                                "communities": communities,
                                "prefix": this_prefix,
                                "peer_asn": peer_asn,
                            }
                            if validator.validate(msg):
                                msgs = normalize_msg_path(msg)
                                for msg in msgs:
                                    key_generator(msg)
                                    log.debug(msg)
                                    producer.publish(
                                        msg,
                                        exchange=exchange,
                                        routing_key="update",
                                        serializer="json",
                                    )
                            else:
                                log.warning(
                                    "Invalid format message: {}".format(msg))
                try:
                    elem = rec.get_next_elem()
                except BaseException:
                    # FIX: was `continue`, which re-entered this loop with the
                    # same stale elem and re-published it indefinitely; abandon
                    # the record and fetch a fresh one instead
                    break