def find_more_specific(consumer):
    """Yield an event whenever a peer announces a prefix that is more
    specific than one already announced by a different peer."""
    rib = Radix()
    for item in consumer:
        data = json.loads(item.value)
        if "as_path" not in data:
            continue
        # Any stored prefix covering the new one means this announcement
        # is equal or more specific than an earlier one.
        for node in rib.search_covering(data["prefix"]):
            for peer, as_path in node.data.items():
                if peer != data["peer_as"]:
                    yield (node.prefix, as_path,
                           data["prefix"], data["as_path"])
        # Record the announcement, keyed by the announcing peer.
        node = rib.add(data["prefix"])
        node.data[data["peer_as"]] = data["as_path"]
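# Hedged usage sketch (not part of the original script): feeding
# find_more_specific() two hypothetical updates from different peers,
# where the second /24 is covered by the /16 already in the RIB, so a
# more-specific event is yielded. Assumes py-radix (Radix) and json are
# imported as above; the message fields mirror what the function reads,
# and the prefixes/ASNs are made up for illustration.
from collections import namedtuple

FakeMessage = namedtuple("FakeMessage", ["value"])

demo_updates = [
    FakeMessage(json.dumps({"prefix": "10.0.0.0/16", "peer_as": 64500,
                            "as_path": "64500 64501"})),
    FakeMessage(json.dumps({"prefix": "10.0.1.0/24", "peer_as": 64510,
                            "as_path": "64510 64502"})),
]

for demo_event in find_more_specific(demo_updates):
    # -> ('10.0.0.0/16', '64500 64501', '10.0.1.0/24', '64510 64502')
    print(demo_event)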
consumer = KafkaConsumer(*topics,
                         bootstrap_servers=args.our_servers.split(","),
                         group_id="follower")
if args.offset is not None:
    consumer.set_topic_partitions({(t, 0): args.offset for t in topics})

# setup filters
filters = []
if args.anycast_file is not None:
    anycast = Radix()
    count = 0
    with open(args.anycast_file, "r") as f:
        for prefix in f:
            # skip comment lines in the prefix list
            if not prefix.startswith("#"):
                anycast.add(prefix.strip())
                count += 1
    logger.info("loaded %s prefixes in the anycast list", count)
    logger.info("filtering on prefixes from the file %s", args.anycast_file)
else:
    raise ValueError("please provide an anycast prefix list file")

if args.as_rel_file is not None and args.ppdc_ases_file is not None:
    relations, childs, parents = caida_filter_annaunce(args.as_rel_file,
                                                       args.ppdc_ases_file)
else:
    raise ValueError("caida files required")

for event in find_more_specific(consumer):
    try:
        as_path1 = event[1].split(" ")
consumer = KafkaConsumer(*topics,
                         bootstrap_servers=args.our_servers.split(","),
                         group_id="follower")
if args.offset is not None:
    consumer.set_topic_partitions({(t, 0): args.offset for t in topics})

# setup filters
filters = []
if args.prefixes_file is not None:
    filter_prefixes = Radix()
    with open(args.prefixes_file, "r") as f:
        for prefix in f:
            filter_prefixes.add(prefix.strip())

    def func(data):
        # keep the update if any watched prefix covers its prefix
        return len(list(filter_prefixes.search_covering(
            data["prefix"]))) > 0

    logger.info("filtering on prefixes from the file %s", args.prefixes_file)
    filters.append(func)

for item in consumer:
    if item.value is None:
        continue
    data = json.loads(item.value)
    if len(filters) == 0 or any(f(data) for f in filters):
        print(data)
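# A quick illustration (hypothetical prefixes, not from the repo) of the
# covering-prefix filter defined above: an update passes when any watched
# prefix covers it, so watching 10.0.0.0/8 also catches its more
# specifics. Assumes py-radix is installed.
from radix import Radix

watch = Radix()
watch.add("10.0.0.0/8")

def covers(data):
    return len(watch.search_covering(data["prefix"])) > 0

print(covers({"prefix": "10.1.0.0/16"}))   # True: covered by 10.0.0.0/8
print(covers({"prefix": "192.0.2.0/24"}))  # False: nothing covers it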
args = parser.parse_args()
logging.basicConfig(level=logging.INFO)

consumer = KafkaConsumer("conflicts",
                         bootstrap_servers=["comet-17-22.sdsc.edu:9092",
                                            "comet-17-11.sdsc.edu:9092",
                                            "comet-17-26.sdsc.edu:9092"],
                         group_id="client")
if args.offset is not None:
    topics = [("conflicts", i, args.offset) for i in PARTITIONS.values()]
    consumer.set_topic_partitions(*topics)

# setup filters
filters = []
if args.prefixes_file is not None:
    filter_prefixes = Radix()
    with open(args.prefixes_file, "r") as f:
        for prefix in f:
            filter_prefixes.add(prefix.strip())

    def func(data):
        announce = data.get("announce")
        # py-radix trees have no membership operator; use an exact-match
        # lookup for the announced prefix instead
        return (announce is not None and
                filter_prefixes.search_exact(announce["prefix"]) is not None)

    logger.info("filtering on prefixes from the file %s", args.prefixes_file)
    filters.append(func)

for item in consumer:
    data = json.loads(item.value)
    if len(filters) == 0 or any(f(data) for f in filters):
        print(data)
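# Sketch (illustrative, not from the repo) of the difference between the
# exact-match lookup used in this script and the search_covering() filter
# in the follower script: search_exact() only matches the stored prefix
# itself, never its more specifics. Assumes py-radix is installed.
from radix import Radix

tree = Radix()
tree.add("10.0.0.0/8")

print(tree.search_exact("10.0.0.0/8") is not None)    # True
print(tree.search_exact("10.1.0.0/16") is not None)   # False
print(len(tree.search_covering("10.1.0.0/16")) > 0)   # True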