def build_prefix_tree(self):
    """Build the IPv4/IPv6 autoignore prefix trees from ``self.autoignore_rules``.

    Each translated prefix maps to a node recording the prefix itself and the
    key of the rule that contributed it; already-present prefixes are reused.
    """
    log.info("Starting building autoignore prefix tree...")
    self.prefix_tree = {
        "v4": pytricia.PyTricia(32),
        "v6": pytricia.PyTricia(128),
    }
    total_prefixes = 0
    for rule_key in self.autoignore_rules:
        try:
            rule = self.autoignore_rules[rule_key]
            for conf_prefix in rule["prefixes"]:
                # RFC2622 expressions may expand to multiple concrete prefixes.
                for resolved_prefix in translate_rfc2622(conf_prefix):
                    tree = self.prefix_tree[get_ip_version(resolved_prefix)]
                    if tree.has_key(resolved_prefix):
                        node = tree[resolved_prefix]
                    else:
                        node = {
                            "prefix": resolved_prefix,
                            "rule_key": rule_key,
                        }
                        tree.insert(resolved_prefix, node)
                    total_prefixes += 1
        except Exception:
            log.exception("Exception")
    log.info(
        "{} prefixes integrated in autoignore prefix tree in total".format(
            total_prefixes
        )
    )
    log.info("Finished building autoignore prefix tree.")
def init_mitigation(self):
    """Build the IPv4/IPv6 mitigation prefix trees from ``self.rules``.

    Every translated prefix is inserted with a node carrying the rule's
    configured mitigation action.
    """
    log.info("Initiating mitigation...")
    log.info("Starting building mitigation prefix tree...")
    self.prefix_tree = {
        "v4": pytricia.PyTricia(32),
        "v6": pytricia.PyTricia(128),
    }
    total_prefixes = 0
    for rule in self.rules:
        try:
            for conf_prefix in rule["prefixes"]:
                # RFC2622 expressions may expand to multiple concrete prefixes.
                for resolved_prefix in translate_rfc2622(conf_prefix):
                    version = get_ip_version(resolved_prefix)
                    self.prefix_tree[version].insert(
                        resolved_prefix,
                        {
                            "prefix": resolved_prefix,
                            "data": {"mitigation": rule["mitigation"]},
                        },
                    )
                    total_prefixes += 1
        except Exception:
            log.exception("Exception")
    log.info(
        "{} prefixes integrated in mitigation prefix tree in total".format(
            total_prefixes
        )
    )
    log.info("Finished building mitigation prefix tree.")
    log.info("Mitigation initiated, configured and running.")
def init_detection(self) -> NoReturn:
    """
    Updates rules everytime it receives a new configuration.
    """
    log.info("Initiating detection...")
    log.info("Starting building detection prefix tree...")
    self.prefix_tree = {
        "v4": pytricia.PyTricia(32),
        "v6": pytricia.PyTricia(128),
    }
    total_prefixes = 0
    for rule in self.rules:
        try:
            # Expand ASN expressions (single ASNs or ranges) into flat,
            # de-duplicated lists, rewriting the rule in place.
            rule["origin_asns"] = list(
                {
                    asn
                    for asn_expr in rule["origin_asns"]
                    for asn in flatten(translate_asn_range(asn_expr))
                }
            )
            rule["neighbors"] = list(
                {
                    asn
                    for asn_expr in rule["neighbors"]
                    for asn in flatten(translate_asn_range(asn_expr))
                }
            )
            # One conf object per rule, shared by all of its prefix nodes.
            conf_obj = {
                "origin_asns": rule["origin_asns"],
                "neighbors": rule["neighbors"],
                "policies": set(rule["policies"]),
                "community_annotations": rule["community_annotations"],
            }
            for conf_prefix in rule["prefixes"]:
                for resolved_prefix in translate_rfc2622(conf_prefix):
                    tree = self.prefix_tree[get_ip_version(resolved_prefix)]
                    if tree.has_key(resolved_prefix):
                        node = tree[resolved_prefix]
                    else:
                        node = {
                            "prefix": resolved_prefix,
                            "data": {"confs": []},
                        }
                        tree.insert(resolved_prefix, node)
                    node["data"]["confs"].append(conf_obj)
                    total_prefixes += 1
        except Exception:
            log.exception("Exception")
    log.info(
        "{} prefixes integrated in detection prefix tree in total".format(
            total_prefixes
        )
    )
    log.info("Finished building detection prefix tree.")
    log.info("Detection initiated, configured and running.")
def __check_prefixes(_prefixes):
    """Validate every configured prefix group entry.

    Each entry must be either an RFC2622 prefix expression or a string that
    parses as an IP prefix; otherwise an ``ArtemisError`` is raised.
    """
    for group_prefixes in _prefixes.values():
        for candidate in group_prefixes:
            if not translate_rfc2622(candidate, just_match=True):
                try:
                    str2ip(candidate)
                except Exception:
                    raise ArtemisError("invalid-prefix", candidate)
def start_monitors(self):
    """Restart all monitoring instances after rebuilding the prefix tree.

    Terminates previously spawned monitor processes, rebuilds the radix tree
    from the rules (translating RFC2622 prefixes and ASN ranges in place),
    dumps the least-specific prefixes to the prefix file and launches the
    configured monitor instances.
    """
    # Stop monitor processes left over from a previous configuration.
    for proc_id in self.process_ids:
        try:
            proc_id[1].terminate()
        except ProcessLookupError:
            log.exception("process terminate")
    self.process_ids.clear()
    self.prefixes.clear()

    self.prefix_tree = radix.Radix()
    for rule in self.rules:
        try:
            rule["prefixes"] = list(
                {
                    prefix
                    for conf_prefix in rule["prefixes"]
                    for prefix in flatten(translate_rfc2622(conf_prefix))
                }
            )
            rule["origin_asns"] = list(
                {
                    asn
                    for asn_expr in rule["origin_asns"]
                    for asn in flatten(translate_asn_range(asn_expr))
                }
            )
            rule["neighbors"] = list(
                {
                    asn
                    for asn_expr in rule["neighbors"]
                    for asn in flatten(translate_asn_range(asn_expr))
                }
            )
            for prefix in rule["prefixes"]:
                node = self.prefix_tree.add(prefix)
                node.data["origin_asns"] = rule["origin_asns"]
                node.data["neighbors"] = rule["neighbors"]
                node.data["mitigation"] = rule["mitigation"]
        except Exception:
            log.exception("Exception")

    # only keep super prefixes for monitors
    for prefix in self.prefix_tree.prefixes():
        self.prefixes.add(self.prefix_tree.search_worst(prefix).prefix)
    dump_json(list(self.prefixes), self.prefix_file)

    self.init_ris_instances()
    self.init_exabgp_instances()
    self.init_bgpstreamhist_instance()
    self.init_bgpstreamlive_instance()
    self.init_betabmp_instance()
def start_monitors(self):
    """Rebuild the monitor prefix tree, pick the prefixes to supervise and
    (re)start all configured monitoring instances."""
    log.info("Initiating monitor...")
    # Stop monitor processes left over from a previous configuration.
    for proc_id in self.process_ids:
        try:
            proc_id[1].terminate()
        except ProcessLookupError:
            log.exception("process terminate")
    self.process_ids.clear()
    self.prefixes.clear()

    log.info("Starting building monitor prefix tree...")
    self.prefix_tree = {
        "v4": pytricia.PyTricia(32),
        "v6": pytricia.PyTricia(128),
    }
    total_prefixes = 0
    for rule in self.rules:
        try:
            for conf_prefix in rule["prefixes"]:
                for resolved_prefix in translate_rfc2622(conf_prefix):
                    version = get_ip_version(resolved_prefix)
                    self.prefix_tree[version].insert(resolved_prefix, "")
                    total_prefixes += 1
        except Exception:
            log.exception("Exception")
    log.info(
        "{} prefixes integrated in monitor prefix tree in total".format(
            total_prefixes
        )
    )
    log.info("Finished building monitor prefix tree.")

    # only keep super prefixes for monitors
    log.info("Calculating monitored prefixes for monitor to supervise...")
    for version in self.prefix_tree:
        tree = self.prefix_tree[version]
        for prefix in tree:
            top_prefix = search_worst_prefix(prefix, tree)
            if top_prefix:
                self.prefixes.add(top_prefix)
    dump_json(list(self.prefixes), self.prefix_file)
    log.info("Calculated monitored prefixes for monitor to supervise.")

    log.info("Initiating configured monitoring instances....")
    self.init_ris_instance()
    self.init_exabgp_instance()
    self.init_bgpstreamhist_instance()
    self.init_bgpstreamlive_instance()
    self.init_bgpstreamkafka_instance()
    log.info("All configured monitoring instances initiated.")
    log.info("Monitor initiated, configured and running.")
def init_mitigation(self):
    """Build the radix mitigation tree, attaching each rule's configured
    mitigation action to every one of its translated prefixes."""
    self.prefix_tree = radix.Radix()
    for rule in self.rules:
        # Expand RFC2622 expressions and de-duplicate, rewriting in place.
        resolved_prefixes = set()
        for conf_prefix in rule["prefixes"]:
            resolved_prefixes.update(flatten(translate_rfc2622(conf_prefix)))
        rule["prefixes"] = list(resolved_prefixes)
        for prefix in rule["prefixes"]:
            self.prefix_tree.add(prefix).data["mitigation"] = rule["mitigation"]
def init_detection(self) -> NoReturn:
    """
    Updates rules everytime it receives a new configuration.
    """
    self.prefix_tree = radix.Radix()
    for rule in self.rules:
        # Expand RFC2622 prefix expressions and ASN ranges, de-duplicating
        # and rewriting the rule in place.
        rule["prefixes"] = list(
            {
                prefix
                for conf_prefix in rule["prefixes"]
                for prefix in flatten(translate_rfc2622(conf_prefix))
            }
        )
        rule["origin_asns"] = list(
            {
                asn
                for asn_expr in rule["origin_asns"]
                for asn in flatten(translate_asn_range(asn_expr))
            }
        )
        rule["neighbors"] = list(
            {
                asn
                for asn_expr in rule["neighbors"]
                for asn in flatten(translate_asn_range(asn_expr))
            }
        )
        conf_obj = {
            "origin_asns": rule["origin_asns"],
            "neighbors": rule["neighbors"],
            "policies": set(rule["policies"]),
            "community_annotations": rule["community_annotations"],
        }
        for prefix in rule["prefixes"]:
            node = self.prefix_tree.search_exact(prefix)
            if not node:
                node = self.prefix_tree.add(prefix)
                node.data["confs"] = []
            node.data["confs"].append(conf_obj)
def __check_rules(self, _rules):
    """Normalize and validate every rule in ``_rules`` in place.

    Flattens prefix/ASN/policy fields, maps the ``"*"`` wildcard to ``-1``,
    fills in defaults, and validates prefixes, community annotations and
    ASNs, raising ``ArtemisError`` on the first violation.
    """
    for rule in _rules:
        for field in rule:
            if field not in self.supported_fields:
                log.warning(
                    "unsupported field found {} in {}".format(field, rule)
                )

        rule["prefixes"] = flatten(rule["prefixes"])
        for prefix in rule["prefixes"]:
            # Accept RFC2622 expressions outright; otherwise the string
            # must parse as a plain IP prefix.
            if not translate_rfc2622(prefix, just_match=True):
                try:
                    str2ip(prefix)
                except Exception:
                    raise ArtemisError("invalid-prefix", prefix)

        rule["origin_asns"] = flatten(rule.get("origin_asns", []))
        if rule["origin_asns"] == ["*"]:
            rule["origin_asns"] = [-1]
        rule["neighbors"] = flatten(rule.get("neighbors", []))
        if rule["neighbors"] == ["*"]:
            rule["neighbors"] = [-1]
        rule["mitigation"] = flatten(rule.get("mitigation", "manual"))
        rule["policies"] = flatten(rule.get("policies", []))

        rule["community_annotations"] = rule.get("community_annotations", [])
        if not isinstance(rule["community_annotations"], list):
            raise ArtemisError("invalid-outer-list-comm-annotations", "")
        seen_annotations = set()
        for outer_entry in rule["community_annotations"]:
            if not isinstance(outer_entry, dict):
                raise ArtemisError("invalid-dict-comm-annotations", "")
            for annotation in outer_entry:
                if annotation in seen_annotations:
                    raise ArtemisError(
                        "duplicate-community-annotation", annotation
                    )
                seen_annotations.add(annotation)
                if not isinstance(outer_entry[annotation], list):
                    raise ArtemisError(
                        "invalid-inner-list-comm-annotations", annotation
                    )
                for inner_entry in outer_entry[annotation]:
                    for key in inner_entry:
                        if key not in ["in", "out"]:
                            raise ArtemisError(
                                "invalid-community-annotation-key", key
                            )
                    # Communities must look like "<asn>:<value>".
                    for community in flatten(inner_entry.get("in", [])):
                        if not re.match(r"\d+\:\d+", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community
                            )
                    for community in flatten(inner_entry.get("out", [])):
                        if not re.match(r"\d+\:\d+", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community
                            )

        for asn in rule["origin_asns"] + rule["neighbors"]:
            # ASN ranges are allowed; anything else must already be an int.
            if not translate_asn_range(asn, just_match=True):
                if not isinstance(asn, int):
                    raise ArtemisError("invalid-asn", asn)
def check(self, data: Text) -> Dict:
    """
    Checks if all sections and fields are defined correctly
    in the parsed configuration.
    Raises custom exceptions in case a field or section
    is misdefined.

    :param data: parsed configuration (mutated in place: prefix/ASN
        fields are flattened and wildcards normalized)
    :return: the normalized configuration
    :raises ArtemisError: on the first misdefined section or field
    """
    # Reject unknown top-level sections before touching anything else.
    for section in data:
        if section not in self.sections:
            raise ArtemisError("invalid-section", section)

    data["prefixes"] = {k: flatten(v) for k, v in data["prefixes"].items()}
    for prefix_group, prefixes in data["prefixes"].items():
        for prefix in prefixes:
            # RFC2622 expressions are accepted as-is; otherwise the
            # string must parse as a plain IP prefix.
            if translate_rfc2622(prefix, just_match=True):
                continue
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError("invalid-prefix", prefix)

    for rule in data["rules"]:
        for field in rule:
            if field not in self.supported_fields:
                log.warning(
                    "unsupported field found {} in {}".format(field, rule)
                )
        rule["prefixes"] = flatten(rule["prefixes"])
        for prefix in rule["prefixes"]:
            if translate_rfc2622(prefix, just_match=True):
                continue
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError("invalid-prefix", prefix)
        # "*" wildcard is normalized to the sentinel -1.
        rule["origin_asns"] = flatten(rule.get("origin_asns", []))
        if rule["origin_asns"] == ["*"]:
            rule["origin_asns"] = [-1]
        rule["neighbors"] = flatten(rule.get("neighbors", []))
        if rule["neighbors"] == ["*"]:
            rule["neighbors"] = [-1]
        rule["mitigation"] = flatten(rule.get("mitigation", "manual"))
        rule["policies"] = flatten(rule.get("policies", []))
        rule["community_annotations"] = rule.get("community_annotations", [])
        if not isinstance(rule["community_annotations"], list):
            raise ArtemisError("invalid-outer-list-comm-annotations", "")
        seen_community_annotations = set()
        for annotation_entry_outer in rule["community_annotations"]:
            if not isinstance(annotation_entry_outer, dict):
                raise ArtemisError("invalid-dict-comm-annotations", "")
            for annotation in annotation_entry_outer:
                if annotation in seen_community_annotations:
                    raise ArtemisError(
                        "duplicate-community-annotation", annotation
                    )
                seen_community_annotations.add(annotation)
                if not isinstance(annotation_entry_outer[annotation], list):
                    raise ArtemisError(
                        "invalid-inner-list-comm-annotations", annotation
                    )
                for annotation_entry_inner in annotation_entry_outer[
                    annotation
                ]:
                    for key in annotation_entry_inner:
                        if key not in ["in", "out"]:
                            raise ArtemisError(
                                "invalid-community-annotation-key", key
                            )
                    # Communities must look like "<asn>:<value>".
                    in_communities = flatten(
                        annotation_entry_inner.get("in", [])
                    )
                    for community in in_communities:
                        if not re.match(r"\d+\:\d+", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community
                            )
                    out_communities = flatten(
                        annotation_entry_inner.get("out", [])
                    )
                    for community in out_communities:
                        if not re.match(r"\d+\:\d+", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community
                            )
        for asn in rule["origin_asns"] + rule["neighbors"]:
            if translate_asn_range(asn, just_match=True):
                continue
            if not isinstance(asn, int):
                raise ArtemisError("invalid-asn", asn)

    if "monitors" in data:
        for key, info in data["monitors"].items():
            if key not in self.supported_monitors:
                raise ArtemisError("invalid-monitor", key)
            elif key == "riperis":
                for unavailable in set(info).difference(self.available_ris):
                    log.warning("unavailable monitor {}".format(unavailable))
            elif key == "bgpstreamlive":
                if not info or not set(info).issubset(
                    self.available_bgpstreamlive
                ):
                    raise ArtemisError("invalid-bgpstreamlive-project", info)
            elif key == "exabgp":
                for entry in info:
                    # BUGFIX: was `and`, which only rejected entries missing
                    # BOTH keys; an entry with exactly one of "ip"/"port"
                    # slipped through and crashed below with a raw KeyError
                    # instead of a clear validation error.
                    if "ip" not in entry or "port" not in entry:
                        raise ArtemisError("invalid-exabgp-info", entry)
                    if entry["ip"] != "exabgp":
                        try:
                            str2ip(entry["ip"])
                        except Exception:
                            raise ArtemisError("invalid-exabgp-ip", entry["ip"])
                    if not isinstance(entry["port"], int):
                        raise ArtemisError("invalid-exabgp-port", entry["port"])

    data["asns"] = {k: flatten(v) for k, v in data["asns"].items()}
    for name, asns in data["asns"].items():
        for asn in asns:
            if translate_asn_range(asn, just_match=True):
                continue
            if not isinstance(asn, int):
                raise ArtemisError("invalid-asn", asn)
    return data