Example 1
0
def parse_registry_data(irr_org_file=None,
                        irr_mnt_file=None,
                        irr_ro_file=None,
                        rpki_roa_file=None):
    """Build the ordered list of annotation callbacks from registry files.

    Each optional file enables one additional annotation pass; the
    direct-conflict and type annotations are always included (first and
    last, respectively).

    :param irr_org_file: CSV file containing irr,organisation,asn
    :param irr_mnt_file: CSV file containing irr,maintainer,asn
    :param irr_ro_file: CSV file containing irr,prefix,asn
    :param rpki_roa_file: CSV file containing asn,prefix,max_length,valid
    :return: list of callables, each taking a conflict dict to annotate
    """
    logger.info("loading metadata...")
    annotators = [annotate_if_direct]

    # Organisation/maintainer relations require both files to be usable.
    if irr_org_file is not None and irr_mnt_file is not None:
        relations = {}
        fill_relation_struct(irr_org_file, relations, "organisations")
        fill_relation_struct(irr_mnt_file, relations, "maintainers")
        annotators.append(partial(annotate_if_relation, relations))

    if irr_ro_file is not None:
        route_objects = Radix()
        fill_ro_struct(irr_ro_file, route_objects)
        annotators.append(partial(annotate_if_route_objects, route_objects))

    if rpki_roa_file is not None:
        roas = Radix()
        fill_roa_struct(rpki_roa_file, roas)
        annotators.append(partial(annotate_if_roa, roas))

    annotators.append(annotate_with_type)

    return annotators
def find_more_specific(consumer):
    """Yield announces that are more specific than previously seen prefixes.

    Builds a radix tree mapping each seen prefix to a ``{peer_as: as_path}``
    dict. Whenever a new announce is covered by an already-stored, less
    specific prefix announced by a *different* peer, a tuple
    ``(covering_prefix, covering_as_path, prefix, as_path)`` is yielded.

    :param consumer: iterable of messages whose ``value`` attribute is a JSON
        document with at least "prefix" and "peer_as" keys; messages without
        an "as_path" key (withdraws) are skipped
    """
    rib = Radix()
    for item in consumer:
        data = json.loads(item.value)
        # A message with no AS path cannot signal a more-specific announce.
        if "as_path" not in data:
            continue
        for node in rib.search_covering(data["prefix"]):
            # items() instead of the Python-2-only iteritems(): behaves the
            # same on Python 2 and is required on Python 3.
            for peer, as_path in node.data.items():
                if peer != data["peer_as"]:
                    yield (node.prefix, as_path, data["prefix"],
                           data["as_path"])
        node = rib.add(data["prefix"])
        node.data[data["peer_as"]] = data["as_path"]
Example 3
0
def find_more_specific(consumer):
    """Yield announces that are more specific than previously seen prefixes.

    Builds a radix tree mapping each seen prefix to a ``{peer_as: as_path}``
    dict. Whenever a new announce is covered by an already-stored, less
    specific prefix announced by a *different* peer, a tuple
    ``(covering_prefix, covering_as_path, prefix, as_path)`` is yielded.

    :param consumer: iterable of messages whose ``value`` attribute is a JSON
        document with at least "prefix" and "peer_as" keys; messages without
        an "as_path" key (withdraws) are skipped
    """
    rib = Radix()
    for item in consumer:
        data = json.loads(item.value)
        # A message with no AS path cannot signal a more-specific announce.
        if "as_path" not in data:
            continue
        for node in rib.search_covering(data["prefix"]):
            # items() instead of the Python-2-only iteritems(): behaves the
            # same on Python 2 and is required on Python 3.
            for peer, as_path in node.data.items():
                if peer != data["peer_as"]:
                    yield (node.prefix, as_path, data["prefix"],
                           data["as_path"])
        node = rib.add(data["prefix"])
        node.data[data["peer_as"]] = data["as_path"]
Example 4
0
def detect_hijacks(collector,
                   files,
                   irr_org_file=None,
                   irr_mnt_file=None,
                   irr_ro_file=None,
                   rpki_roa_file=None,
                   opener=default_opener,
                   format=mabo_format,
                   is_watched=None):
    """
    Detect BGP hijacks from `files' and annotate them using metadata.

    :param collector: Name of the collector the BGP files come from
    :param files: List of BGP files to process
    :param irr_org_file: CSV file containing irr,organisation,asn
    :param irr_mnt_file: CSV file containing irr,maintainer,asn
    :param irr_ro_file: CSV file containing irr,prefix,asn
    :param rpki_roa_file: CSV file containing asn,prefix,max_length,valid
    :param opener: Function to use in order to open the files
    :param format: Format of the BGP data in the files
    :param is_watched: Optional argument forwarded as-is to detect_conflicts
    :return: Generator of hijacks (conflicts with annotation)
    """

    logger.info("loading metadata...")
    # Annotation passes applied, in order, to every detected conflict.
    funcs = [annotate_if_direct]
    # Organisation/maintainer relations require both files to be usable.
    if irr_org_file is not None and irr_mnt_file is not None:
        relations_dict = dict()
        fill_relation_struct(irr_org_file, relations_dict, "organisations")
        fill_relation_struct(irr_mnt_file, relations_dict, "maintainers")
        funcs.append(partial(annotate_if_relation, relations_dict))

    if irr_ro_file is not None:
        ro_rad_tree = Radix()
        fill_ro_struct(irr_ro_file, ro_rad_tree)
        funcs.append(partial(annotate_if_route_objects, ro_rad_tree))

    if rpki_roa_file is not None:
        roa_rad_tree = Radix()
        fill_roa_struct(rpki_roa_file, roa_rad_tree)
        funcs.append(partial(annotate_if_roa, roa_rad_tree))

    funcs.append(annotate_with_type)
    logger.info("starting hijacks detection...")
    # Annotate each raw conflict in place before handing it to the caller.
    for conflict in detect_conflicts(collector,
                                     files,
                                     opener=opener,
                                     format=format,
                                     is_watched=is_watched):
        for f in funcs:
            f(conflict)
        yield conflict
    # NOTE(review): this looks like a fragment of a separate script's main()
    # pasted after the previous function -- it references `collectors` and
    # `args`, which are not defined in the enclosing scope. Verify origin.
    # One "rib-<collector>" Kafka topic per collector.
    topics = ["rib-{}".format(c) for c in collectors]
    logger.info("using topics %s", topics)

    consumer = KafkaConsumer(*topics,
                             bootstrap_servers=args.our_servers.split(","),
                             group_id="follower")

    # Optionally rewind/seek every topic's partition 0 to a fixed offset.
    if args.offset is not None:
        consumer.set_topic_partitions({(t, 0): args.offset for t in topics})

    # setup filters
    filters = []

    if args.anycast_file is not None:
        # Load known-anycast prefixes (one per line, '#' starts a comment).
        anycast = Radix()
        count = 0
        with open(args.anycast_file, "r") as f:
            for prefix in f:
                if not prefix.startswith("#"):
                    anycast.add(prefix.strip())
                    count += 1
        logger.info("loaded %s prefixes in the anycast list", count)
        logger.info("filtering on prefixes from the file %s", args.anycast_file)
    else:
        raise ValueError("please provide a anycast prefix list file")

    # CAIDA AS-relationship data is mandatory for the announce filter.
    if args.as_rel_file is not None and args.ppdc_ases_file is not None:
        relations, childs, parents = caida_filter_annaunce(args.as_rel_file, args.ppdc_ases_file)
    else:
        raise ValueError("caida files required")
Example 6
0
    parser.add_argument("--as-rel-file",
                        help="TXT file containing AS relation")
    parser.add_argument("--ppdc-ases-file")
    parser.add_argument("--as2org-file",
                        help="TXT file containing AS to organizations")

    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    kwargs = kafka_input(args.collector, broker=args.our_servers.split(","))

    logger.info("loading metadata...")
    # Annotation passes applied, in order, to every consumed item.
    funcs = [annotate_if_direct]
    if args.irr_ro_file is not None:
        ro_rad_tree = Radix()
        fill_ro_struct(args.irr_ro_file, ro_rad_tree)
        funcs.append(partial(annotate_if_route_objects, ro_rad_tree))

    if args.rpki_roa_file is not None:
        roa_rad_tree = Radix()
        fill_roa_struct(args.rpki_roa_file, roa_rad_tree)
        # BUGFIX: this previously passed ro_rad_tree, which is the wrong tree
        # and is undefined (NameError) whenever --irr-ro-file is not given;
        # the ROA annotator must receive the ROA radix tree built above.
        funcs.append(partial(annotate_if_roa, roa_rad_tree))

    # Consistency fix: explicit `is not None`, matching the sibling checks
    # (argparse defaults these to None, so behaviour is unchanged).
    if args.irr_org_file is not None and args.irr_mnt_file is not None:
        relations_dict = dict()
        fill_relation_struct(args.irr_org_file, relations_dict,
                             "organisations")
        fill_relation_struct(args.irr_mnt_file, relations_dict, "maintainers")
        funcs.append(partial(annotate_if_relation, relations_dict))
        # NOTE(review): topic selection being tied to the relation files looks
        # like an indentation slip -- confirm the intended behaviour.
        topics = ["rib-{}".format(c) for c in args.collector]
    else:
        topics = ["rib-{}".format(c) for c in COLLECTORS]

    consumer = KafkaConsumer(*topics,
                             bootstrap_servers=args.our_servers.split(","),
                             group_id="follower")

    # Optionally rewind/seek every topic's partition 0 to a fixed offset.
    if args.offset is not None:
        consumer.set_topic_partitions({(t, 0): args.offset for t in topics})

    # setup filters
    filters = []

    if args.prefixes_file is not None:
        filter_prefixes = Radix()
        with open(args.prefixes_file, "r") as f:
            for prefix in f:
                filter_prefixes.add(prefix.strip())

        def func(data):
            # Keep only announces covered by at least one watched prefix.
            return len(list(filter_prefixes.search_covering(
                data["prefix"]))) > 0

        logger.info("filtering on prefixes from the file %s",
                    args.prefixes_file)
        filters.append(func)

    # Tombstone messages (None payloads) carry no announce to process.
    for item in consumer:
        if item.value is None:
            continue
Example 8
0
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    # Consume the pre-computed conflicts stream from the hard-coded brokers.
    consumer = KafkaConsumer("conflicts",
                             bootstrap_servers=["comet-17-22.sdsc.edu:9092", "comet-17-11.sdsc.edu:9092", "comet-17-26.sdsc.edu:9092"],
                             group_id="client")
    # Optionally seek every known partition of "conflicts" to a fixed offset.
    if args.offset is not None:
        topics = [("conflicts", i, args.offset) for i in PARTITIONS.values()]
        consumer.set_topic_partitions(*topics)

    # setup filters
    filters = []

    if args.prefixes_file is not None:
        # Watched prefixes, one per line.
        filter_prefixes = Radix()
        with open(args.prefixes_file, "r") as f:
            for prefix in f:
                filter_prefixes.add(prefix.strip())

        def func(data):
            # Keep only conflicts whose announce is for a watched prefix.
            # NOTE(review): `in` on a Radix tree relies on it implementing
            # __contains__ (exact-match lookup); other snippets in this file
            # use search_covering() instead -- confirm this is intentional.
            announce = data.get("announce")
            return announce is not None and announce["prefix"] in filter_prefixes
        logger.info("filtering on prefixes from the file %s", args.prefixes_file)
        filters.append(func)

    # Print every conflict that passes all filters (or all, if no filter).
    for item in consumer:
        data = json.loads(item.value)
        if len(filters) == 0 or any(f(data) for f in filters):
            print(data)
Example 9
0
    # NOTE(review): near-duplicate of an earlier fragment in this collection;
    # it references `collectors` and `args`, which are not defined in the
    # visible scope, and the fragment is cut off at the end of the chunk.
    # One "rib-<collector>" Kafka topic per collector.
    topics = ["rib-{}".format(c) for c in collectors]
    logger.info("using topics %s", topics)

    consumer = KafkaConsumer(*topics,
                             bootstrap_servers=args.our_servers.split(","),
                             group_id="follower")

    # Optionally rewind/seek every topic's partition 0 to a fixed offset.
    if args.offset is not None:
        consumer.set_topic_partitions({(t, 0): args.offset for t in topics})

    # setup filters
    filters = []

    if args.anycast_file is not None:
        # Load known-anycast prefixes (one per line, '#' starts a comment).
        anycast = Radix()
        count = 0
        with open(args.anycast_file, "r") as f:
            for prefix in f:
                if not prefix.startswith("#"):
                    anycast.add(prefix.strip())
                    count += 1
        logger.info("loaded %s prefixes in the anycast list", count)
        logger.info("filtering on prefixes from the file %s",
                    args.anycast_file)
    else:
        raise ValueError("please provide a anycast prefix list file")

    # CAIDA AS-relationship data is mandatory for the announce filter.
    if args.as_rel_file is not None and args.ppdc_ases_file is not None:
        relations, childs, parents = caida_filter_annaunce(
            args.as_rel_file, args.ppdc_ases_file)