def __check_monitors(self, _monitors):
    """Validate the 'monitors' section of the configuration.

    :param _monitors: (dict) monitor name -> monitor-specific info
    :raises ArtemisError: on an unsupported monitor key or invalid
        monitor-specific configuration
    """
    for key, info in _monitors.items():
        if key not in self.supported_monitors:
            raise ArtemisError("invalid-monitor", key)
        elif key == "riperis":
            # Unavailable RIS collectors are tolerated with a warning only.
            for unavailable in set(info).difference(self.available_ris):
                log.warning("unavailable monitor {}".format(unavailable))
        elif key == "bgpstreamlive":
            if not info or not set(info).issubset(
                    self.available_bgpstreamlive):
                raise ArtemisError("invalid-bgpstreamlive-project", info)
        elif key == "exabgp":
            for entry in info:
                # BUGFIX: both 'ip' and 'port' are required, so use 'or'.
                # The previous 'and' only rejected entries missing *both*
                # keys, letting a half-specified entry crash later with an
                # uncaught KeyError on entry["ip"] / entry["port"].
                if "ip" not in entry or "port" not in entry:
                    raise ArtemisError("invalid-exabgp-info", entry)
                if entry["ip"] != "exabgp":
                    try:
                        str2ip(entry["ip"])
                    except Exception:
                        raise ArtemisError("invalid-exabgp-ip", entry["ip"])
                if not isinstance(entry["port"], int):
                    raise ArtemisError("invalid-exabgp-port", entry["port"])
def __check_prefixes(_prefixes):
    """Validate every prefix in the 'prefixes' section.

    A prefix is accepted if it matches RFC2622 operator notation or
    parses as a plain IPv4/IPv6 prefix.

    :param _prefixes: (dict) group name -> list of prefix strings
    :raises ArtemisError: for any prefix that fails both checks
    """
    for group_name, group_prefixes in _prefixes.items():
        for candidate in group_prefixes:
            # RFC2622 expressions (e.g. "10.0.0.0/8^+") are valid as-is.
            if not translate_rfc2622(candidate, just_match=True):
                try:
                    str2ip(candidate)
                except Exception:
                    raise ArtemisError("invalid-prefix", candidate)
def translate_rfc2622(input_prefix, just_match=False):
    """
    :param input_prefix: (str) input IPv4/IPv6 prefix that
    should be translated according to RFC2622
    :param just_match: (bool) check only if the prefix
    has matched instead of translating
    :return: output_prefixes: (iterator of str) output IPv4/IPv6 prefixes,
    if not just_match, otherwise True or False
    """
    # ^- is the exclusive more specifics operator; it stands for the more
    # specifics of the address prefix excluding the address prefix itself.
    # For example, 128.9.0.0/16^- contains all the more specifics of
    # 128.9.0.0/16 excluding 128.9.0.0/16.
    exclusive = re.match(r"^(\S*)\^-$", input_prefix)
    if exclusive:
        base = exclusive.group(1)
        if valid_prefix(base):
            base_ip = str2ip(base)
            if just_match:
                return True
            return calculate_more_specifics(
                base_ip, base_ip.prefixlen + 1, base_ip.max_prefixlen)

    # ^+ is the inclusive more specifics operator; it stands for the more
    # specifics of the address prefix including the address prefix itself.
    # For example, 5.0.0.0/8^+ contains all the more specifics of
    # 5.0.0.0/8 including 5.0.0.0/8.
    inclusive = re.match(r"^(\S*)\^\+$", input_prefix)
    if inclusive:
        base = inclusive.group(1)
        if valid_prefix(base):
            base_ip = str2ip(base)
            if just_match:
                return True
            return calculate_more_specifics(
                base_ip, base_ip.prefixlen, base_ip.max_prefixlen)

    # ^n where n is an integer, stands for all the length n specifics of
    # the address prefix. For example, 30.0.0.0/8^16 contains all the
    # more specifics of 30.0.0.0/8 which are of length 16 such as
    # 30.9.0.0/16. NOTE: unlike the other branches, this one returns a
    # concrete list of strings (preserved from the original behavior).
    exact = re.match(r"^(\S*)\^(\d+)$", input_prefix)
    if exact:
        base = exact.group(1)
        length = int(exact.group(2))
        if valid_prefix(base):
            base_ip = str2ip(base)
            # Range errors are raised even when just_match is requested.
            if length < base_ip.prefixlen:
                raise ArtemisError("invalid-n-small", input_prefix)
            if length > base_ip.max_prefixlen:
                raise ArtemisError("invalid-n-large", input_prefix)
            if just_match:
                return True
            return list(
                map(str, calculate_more_specifics(base_ip, length, length)))

    # ^n-m where n and m are integers, stands for all the length n to
    # length m specifics of the address prefix. For example,
    # 30.0.0.0/8^24-32 contains all the more specifics of 30.0.0.0/8
    # which are of length 24 to 32 such as 30.9.9.96/28.
    ranged = re.match(r"^(\S*)\^(\d+)-(\d+)$", input_prefix)
    if ranged:
        base = ranged.group(1)
        low = int(ranged.group(2))
        high = int(ranged.group(3))
        if valid_prefix(base):
            base_ip = str2ip(base)
            if low < base_ip.prefixlen:
                raise ArtemisError("invalid-n-small", input_prefix)
            if high > base_ip.max_prefixlen:
                raise ArtemisError("invalid-n-large", input_prefix)
            if just_match:
                return True
            return calculate_more_specifics(base_ip, low, high)

    # Nothing matched: pass the input through unchanged.
    if just_match:
        return False
    return [input_prefix]
def valid_prefix(input_prefix):
    """Return True if *input_prefix* parses as an IPv4/IPv6 prefix."""
    try:
        str2ip(input_prefix)
        return True
    except Exception:
        return False
def valid_prefix(self):
    """Return True if this message carries a parsable IPv4/IPv6 prefix."""
    try:
        str2ip(self.msg["prefix"])
    except Exception:
        return False
    else:
        return True
dest="info_hijack", type=str, help="hijack event information", required=True, ) args = parser.parse_args() # info_hijack = { # "key": <hijack_key>, # "prefix": <prefix> # } try: info_hijack = json.loads(args.info_hijack) log.info("Preparing to mitigate via deaggregation hijack {}".format( info_hijack)) hijacked_prefix = str2ip(info_hijack["prefix"]) hijacked_prefix_len = hijacked_prefix.prefixlen deagg_len_threshold = 24 if hijacked_prefix.version == 6: deagg_len_threshold = 64 if hijacked_prefix_len < deagg_len_threshold: subnets = list(map(str, list(hijacked_prefix.subnets()))) log.info("Subnets to announce: {}".format(subnets)) for subnet in subnets: exa_command = "announce route {} next-hop self".format(subnet) sio = SocketIO("http://" + EXA_ROUTE_COMMAND_HOST, namespace=BaseNamespace) sio.connect() sio.emit("route_command", {"command": exa_command}) sio.disconnect() else:
def valid_prefix(msg):
    """Return True if msg['prefix'] parses as an IPv4/IPv6 prefix.

    :param msg: (dict) BGP update message expected to carry a 'prefix' key
    """
    try:
        str2ip(msg['prefix'])
    # BUGFIX: catch Exception instead of BaseException so that
    # KeyboardInterrupt/SystemExit are not silently swallowed; this also
    # matches the other valid_prefix helpers in this codebase.
    except Exception:
        return False
    return True
def __check_rules(self, _rules):
    """Validate and normalize the 'rules' section of the configuration.

    Flattens list-valued fields, maps the '*' wildcard to -1 for ASN
    fields, and validates prefixes, community annotations and ASNs.

    :param _rules: (list of dict) rule entries, modified in place
    :raises ArtemisError: on any malformed rule field
    """
    for rule in _rules:
        for field in rule:
            if field not in self.supported_fields:
                log.warning(
                    "unsupported field found {} in {}".format(field, rule))
        rule["prefixes"] = flatten(rule["prefixes"])
        for prefix in rule["prefixes"]:
            # RFC2622 expressions are valid as-is; otherwise the prefix
            # must parse as a plain IPv4/IPv6 prefix.
            if translate_rfc2622(prefix, just_match=True):
                continue
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError("invalid-prefix", prefix)
        rule["origin_asns"] = flatten(rule.get("origin_asns", []))
        # '*' wildcard ("any ASN") is encoded internally as -1.
        if rule["origin_asns"] == ["*"]:
            rule["origin_asns"] = [-1]
        rule["neighbors"] = flatten(rule.get("neighbors", []))
        if rule["neighbors"] == ["*"]:
            rule["neighbors"] = [-1]
        rule["mitigation"] = flatten(rule.get("mitigation", "manual"))
        rule["policies"] = flatten(rule.get("policies", []))
        rule["community_annotations"] = rule.get("community_annotations", [])
        if not isinstance(rule["community_annotations"], list):
            raise ArtemisError("invalid-outer-list-comm-annotations", "")
        seen_community_annotations = set()
        for annotation_entry_outer in rule["community_annotations"]:
            if not isinstance(annotation_entry_outer, dict):
                raise ArtemisError("invalid-dict-comm-annotations", "")
            for annotation in annotation_entry_outer:
                if annotation in seen_community_annotations:
                    raise ArtemisError(
                        "duplicate-community-annotation", annotation)
                seen_community_annotations.add(annotation)
                if not isinstance(annotation_entry_outer[annotation], list):
                    raise ArtemisError(
                        "invalid-inner-list-comm-annotations", annotation)
                for annotation_entry_inner in annotation_entry_outer[
                        annotation]:
                    for key in annotation_entry_inner:
                        if key not in ["in", "out"]:
                            raise ArtemisError(
                                "invalid-community-annotation-key", key)
                    in_communities = flatten(
                        annotation_entry_inner.get("in", []))
                    for community in in_communities:
                        # BUGFIX: anchored pattern; the previous unanchored
                        # r"\d+\:\d+" accepted trailing junk such as
                        # "1:2:3" or "1:2abc".
                        if not re.match(r"^\d+:\d+$", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community)
                    out_communities = flatten(
                        annotation_entry_inner.get("out", []))
                    for community in out_communities:
                        if not re.match(r"^\d+:\d+$", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community)
        for asn in rule["origin_asns"] + rule["neighbors"]:
            # ASN ranges (e.g. "64496-64500") are valid as-is.
            if translate_asn_range(asn, just_match=True):
                continue
            if not isinstance(asn, int):
                raise ArtemisError("invalid-asn", asn)
def check(self, data: Text) -> Dict:
    """
    Checks if all sections and fields are defined correctly
    in the parsed configuration.
    Raises custom exceptions in case a field or section
    is misdefined.

    :param data: parsed configuration (mapping of sections)
    :return: the validated and normalized configuration
    :raises ArtemisError: on any misdefined section or field
    """
    for section in data:
        if section not in self.sections:
            raise ArtemisError('invalid-section', section)

    data['prefixes'] = {
        k: flatten(v) for k, v in data['prefixes'].items()
    }
    for prefix_group, prefixes in data['prefixes'].items():
        for prefix in prefixes:
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError('invalid-prefix', prefix)

    for rule in data['rules']:
        for field in rule:
            if field not in self.supported_fields:
                log.warning('unsupported field found {} in {}'.format(
                    field, rule))
        rule['prefixes'] = flatten(rule['prefixes'])
        for prefix in rule['prefixes']:
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError('invalid-prefix', prefix)
        rule['origin_asns'] = flatten(rule.get('origin_asns', []))
        rule['neighbors'] = flatten(rule.get('neighbors', []))
        rule['mitigation'] = flatten(rule.get('mitigation', 'manual'))
        for asn in (rule['origin_asns'] + rule['neighbors']):
            if not isinstance(asn, int):
                raise ArtemisError('invalid-asn', asn)

    for key, info in data['monitors'].items():
        if key not in self.supported_monitors:
            raise ArtemisError('invalid-monitor', key)
        elif key == 'riperis':
            # Unavailable RIS collectors only produce a warning.
            for unavailable in set(info).difference(self.available_ris):
                log.warning('unavailable monitor {}'.format(unavailable))
        elif key == 'bgpstreamlive':
            if not info or not set(info).issubset(
                    self.available_bgpstreamlive):
                raise ArtemisError('invalid-bgpstreamlive-project', info)
        elif key == 'exabgp':
            for entry in info:
                # BUGFIX: both 'ip' and 'port' are required, so use 'or'.
                # The previous 'and' only rejected entries missing *both*
                # keys, letting a half-specified entry crash later with an
                # uncaught KeyError instead of a clean ArtemisError.
                if 'ip' not in entry or 'port' not in entry:
                    raise ArtemisError('invalid-exabgp-info', entry)
                if entry['ip'] != 'exabgp':
                    try:
                        str2ip(entry['ip'])
                    except Exception:
                        raise ArtemisError('invalid-exabgp-ip', entry['ip'])
                if not isinstance(entry['port'], int):
                    raise ArtemisError('invalid-exabgp-port', entry['port'])

    data['asns'] = {k: flatten(v) for k, v in data['asns'].items()}
    for name, asns in data['asns'].items():
        for asn in asns:
            if not isinstance(asn, int):
                raise ArtemisError('invalid-asn', asn)
    return data
def check(self, data: Text) -> Dict:
    """
    Checks if all sections and fields are defined correctly
    in the parsed configuration.
    Raises custom exceptions in case a field or section
    is misdefined.

    :param data: parsed configuration (mapping of sections)
    :return: the validated and normalized configuration
    :raises ArtemisError: on any misdefined section or field
    """
    for section in data:
        if section not in self.sections:
            raise ArtemisError("invalid-section", section)

    data["prefixes"] = {k: flatten(v) for k, v in data["prefixes"].items()}
    for prefix_group, prefixes in data["prefixes"].items():
        for prefix in prefixes:
            # RFC2622 expressions are valid as-is; otherwise the prefix
            # must parse as a plain IPv4/IPv6 prefix.
            if translate_rfc2622(prefix, just_match=True):
                continue
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError("invalid-prefix", prefix)

    for rule in data["rules"]:
        for field in rule:
            if field not in self.supported_fields:
                log.warning(
                    "unsupported field found {} in {}".format(field, rule)
                )
        rule["prefixes"] = flatten(rule["prefixes"])
        for prefix in rule["prefixes"]:
            if translate_rfc2622(prefix, just_match=True):
                continue
            try:
                str2ip(prefix)
            except Exception:
                raise ArtemisError("invalid-prefix", prefix)
        rule["origin_asns"] = flatten(rule.get("origin_asns", []))
        # '*' wildcard ("any ASN") is encoded internally as -1.
        if rule["origin_asns"] == ["*"]:
            rule["origin_asns"] = [-1]
        rule["neighbors"] = flatten(rule.get("neighbors", []))
        if rule["neighbors"] == ["*"]:
            rule["neighbors"] = [-1]
        rule["mitigation"] = flatten(rule.get("mitigation", "manual"))
        rule["policies"] = flatten(rule.get("policies", []))
        rule["community_annotations"] = rule.get("community_annotations", [])
        if not isinstance(rule["community_annotations"], list):
            raise ArtemisError("invalid-outer-list-comm-annotations", "")
        seen_community_annotations = set()
        for annotation_entry_outer in rule["community_annotations"]:
            if not isinstance(annotation_entry_outer, dict):
                raise ArtemisError("invalid-dict-comm-annotations", "")
            for annotation in annotation_entry_outer:
                if annotation in seen_community_annotations:
                    raise ArtemisError(
                        "duplicate-community-annotation", annotation
                    )
                seen_community_annotations.add(annotation)
                if not isinstance(annotation_entry_outer[annotation], list):
                    raise ArtemisError(
                        "invalid-inner-list-comm-annotations", annotation
                    )
                for annotation_entry_inner in annotation_entry_outer[
                    annotation
                ]:
                    for key in annotation_entry_inner:
                        if key not in ["in", "out"]:
                            raise ArtemisError(
                                "invalid-community-annotation-key", key
                            )
                    in_communities = flatten(
                        annotation_entry_inner.get("in", [])
                    )
                    for community in in_communities:
                        # BUGFIX: anchored pattern; the previous unanchored
                        # r"\d+\:\d+" accepted trailing junk such as
                        # "1:2:3" or "1:2abc".
                        if not re.match(r"^\d+:\d+$", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community
                            )
                    out_communities = flatten(
                        annotation_entry_inner.get("out", [])
                    )
                    for community in out_communities:
                        if not re.match(r"^\d+:\d+$", community):
                            raise ArtemisError(
                                "invalid-bgp-community", community
                            )
        for asn in rule["origin_asns"] + rule["neighbors"]:
            # ASN ranges (e.g. "64496-64500") are valid as-is.
            if translate_asn_range(asn, just_match=True):
                continue
            if not isinstance(asn, int):
                raise ArtemisError("invalid-asn", asn)

    if "monitors" in data:
        for key, info in data["monitors"].items():
            if key not in self.supported_monitors:
                raise ArtemisError("invalid-monitor", key)
            elif key == "riperis":
                # Unavailable RIS collectors only produce a warning.
                for unavailable in set(info).difference(self.available_ris):
                    log.warning("unavailable monitor {}".format(unavailable))
            elif key == "bgpstreamlive":
                if not info or not set(info).issubset(
                    self.available_bgpstreamlive
                ):
                    raise ArtemisError("invalid-bgpstreamlive-project", info)
            elif key == "exabgp":
                for entry in info:
                    # BUGFIX: both 'ip' and 'port' are required, so use
                    # 'or'. The previous 'and' only rejected entries
                    # missing *both* keys, letting a half-specified entry
                    # crash later with an uncaught KeyError.
                    if "ip" not in entry or "port" not in entry:
                        raise ArtemisError("invalid-exabgp-info", entry)
                    if entry["ip"] != "exabgp":
                        try:
                            str2ip(entry["ip"])
                        except Exception:
                            raise ArtemisError(
                                "invalid-exabgp-ip", entry["ip"]
                            )
                    if not isinstance(entry["port"], int):
                        raise ArtemisError(
                            "invalid-exabgp-port", entry["port"]
                        )

    data["asns"] = {k: flatten(v) for k, v in data["asns"].items()}
    for name, asns in data["asns"].items():
        for asn in asns:
            if translate_asn_range(asn, just_match=True):
                continue
            if not isinstance(asn, int):
                raise ArtemisError("invalid-asn", asn)
    return data