def auto_ignore_check_rule(self, key):
    """
    Periodically evaluate one auto-ignore rule against all ongoing hijacks.

    A hijack whose prefix matches this rule and that has been inactive
    longer than the rule's interval, while staying below the rule's
    peer/AS-infection thresholds, is purged from redis (if ongoing) and
    marked as ignored in the DB. The check re-schedules itself via a
    Timer in the `finally` block so that one exception does not stop
    future checks.

    :param key: identifier of the rule inside self.autoignore_rules
    """
    rule = self.autoignore_rules.get(key, None)
    # nothing to do for missing or disabled (non-positive interval) rules;
    # note: returning here also means no timer is (re)scheduled
    if not rule:
        return
    if rule["interval"] <= 0:
        return

    thres_num_peers_seen = rule["thres_num_peers_seen"]
    thres_num_ases_infected = rule["thres_num_ases_infected"]
    interval = rule["interval"]

    log.debug("Checking autoignore rule {}".format(rule))
    try:
        # fetch ongoing hijack events
        query = (
            "SELECT time_started, time_last, num_peers_seen, "
            "num_asns_inf, key, prefix, hijack_as, type, time_detected "
            "FROM hijacks WHERE active = true")
        entries = self.ro_db.execute(query)

        # check which of them should be auto-ignored
        time_now = int(time.time())
        for entry in entries:
            # column order follows the SELECT above:
            # 0=time_started, 1=time_last, 2=num_peers_seen, 3=num_asns_inf,
            # 4=key, 5=prefix, 6=hijack_as, 7=type, 8=time_detected
            prefix = entry[5]
            best_node_match = self.find_best_prefix_node(prefix)
            if not best_node_match:
                continue
            # only apply this rule to hijacks whose prefix maps to it
            if best_node_match["rule_key"] != key:
                continue
            log.debug("Matched prefix {}".format(prefix))

            # "last activity" is the later of last update and detection time
            time_last_updated = max(int(entry[1].timestamp()),
                                    int(entry[8].timestamp()))
            num_peers_seen = int(entry[2])
            num_asns_inf = int(entry[3])
            hij_key = entry[4]
            hijack_as = entry[6]
            hij_type = entry[7]
            # ignore only stale, low-impact hijacks (all conditions must hold)
            if ((time_now - time_last_updated > interval)
                    and (num_peers_seen < thres_num_peers_seen)
                    and (num_asns_inf < thres_num_ases_infected)):
                redis_hijack_key = redis_key(prefix, hijack_as, hij_type)
                # if ongoing, clear redis
                if self.redis.sismember("persistent-keys", hij_key):
                    purge_redis_eph_pers_keys(self.redis, redis_hijack_key,
                                              hij_key)
                self.wo_db.execute(
                    "UPDATE hijacks SET active=false, dormant=false, under_mitigation=false, seen=false, ignored=true WHERE key=%s;",
                    (hij_key, ),
                )
                log.debug("Ignored hijack {}".format(entry))
    except Exception:
        log.exception("exception")
    finally:
        # always re-arm the timer so this rule keeps firing periodically,
        # even if the check above raised
        self.rule_timer_threads[key] = Timer(
            interval=self.autoignore_rules[key]["interval"],
            function=self.auto_ignore_check_rule,
            args=[key],
        )
        self.rule_timer_threads[key].start()
        log.debug("Started timer {} - {}".format(
            self.rule_timer_threads[key], self.autoignore_rules[key]))
def handle_bgp_update(self, message: Dict) -> NoReturn:
    """
    Callback function that runs the main logic of detecting
    hijacks for every bgp update.

    Announcements ("A") are checked across four hijack dimensions
    (prefix, path, data plane, policy); a match against
    HIJACK_DIM_COMBINATIONS commits a hijack. Updates carrying a
    "hij_key" are re-checks of previously detected hijacks and may be
    marked outdated. Withdrawals ("W") are forwarded to the update
    exchange.
    """
    # log.debug('{}'.format(message))
    if isinstance(message, dict):
        # already-parsed event (e.g., internal re-check)
        monitor_event = message
    else:
        # raw queue message: ack it and normalize the payload
        message.ack()
        monitor_event = message.payload
        monitor_event["path"] = monitor_event["as_path"]
        # rebuild a POSIX timestamp from the digit groups of the
        # textual timestamp
        monitor_event["timestamp"] = datetime(
            *map(int, re.findall(r"\d+", monitor_event["timestamp"]))
        ).timestamp()

    # keep a (shallow) copy of the event as received, for mark_handled
    raw = monitor_event.copy()

    # mark the initial redis hijack key since it may change upon
    # outdated checks
    if "hij_key" in monitor_event:
        monitor_event["initial_redis_hijack_key"] = redis_key(
            monitor_event["prefix"],
            monitor_event["hijack_as"],
            monitor_event["hij_type"],
        )

    is_hijack = False

    if monitor_event["type"] == "A":
        monitor_event["path"] = Detection.Worker.__clean_as_path(
            monitor_event["path"]
        )

        ip_version = get_ip_version(monitor_event["prefix"])
        if monitor_event["prefix"] in self.prefix_tree[ip_version]:
            prefix_node = self.prefix_tree[ip_version][monitor_event["prefix"]]
            monitor_event["matched_prefix"] = prefix_node["prefix"]

            try:
                path_hijacker = -1
                pol_hijacker = -1
                hij_dimensions = [
                    "-",
                    "-",
                    "-",
                    "-",
                ]  # prefix, path, dplane, policy
                hij_dimension_index = 0
                # each generator yields the checker functions for one
                # dimension; within a dimension the first non-"-" verdict
                # wins
                for func_dim in self.__hijack_dimension_checker_gen():
                    if hij_dimension_index == 0:
                        # prefix dimension
                        for func_pref in func_dim():
                            hij_dimensions[hij_dimension_index] = func_pref(
                                monitor_event, prefix_node
                            )
                            if hij_dimensions[hij_dimension_index] != "-":
                                break
                    elif hij_dimension_index == 1:
                        # path type dimension (also yields the suspected
                        # hijacker AS)
                        for func_path in func_dim(len(monitor_event["path"])):
                            (
                                path_hijacker,
                                hij_dimensions[hij_dimension_index],
                            ) = func_path(monitor_event, prefix_node)
                            if hij_dimensions[hij_dimension_index] != "-":
                                break
                    elif hij_dimension_index == 2:
                        # data plane dimension
                        for func_dplane in func_dim():
                            hij_dimensions[hij_dimension_index] = func_dplane(
                                monitor_event, prefix_node
                            )
                            if hij_dimensions[hij_dimension_index] != "-":
                                break
                    elif hij_dimension_index == 3:
                        # policy dimension (also yields the suspected
                        # hijacker AS)
                        for func_pol in func_dim(len(monitor_event["path"])):
                            (
                                pol_hijacker,
                                hij_dimensions[hij_dimension_index],
                            ) = func_pol(monitor_event, prefix_node)
                            if hij_dimensions[hij_dimension_index] != "-":
                                break
                    hij_dimension_index += 1
                # check if dimension combination in hijack combinations
                # and commit hijack
                if hij_dimensions in HIJACK_DIM_COMBINATIONS:
                    is_hijack = True
                    # show pol hijacker only if the path hijacker is uncertain
                    hijacker = path_hijacker
                    if path_hijacker == -1 and pol_hijacker != -1:
                        hijacker = pol_hijacker
                    self.commit_hijack(monitor_event, hijacker, hij_dimensions)
            except Exception:
                log.exception("exception")

        outdated_hijack = None
        if not is_hijack and "hij_key" in monitor_event:
            try:
                # outdated hijack, benign from now on
                redis_hijack_key = redis_key(
                    monitor_event["prefix"],
                    monitor_event["hijack_as"],
                    monitor_event["hij_type"],
                )
                outdated_hijack = self.redis.get(redis_hijack_key)
                purge_redis_eph_pers_keys(
                    self.redis, redis_hijack_key, monitor_event["hij_key"]
                )
                # mark in DB only if it is the first time this hijack was purged (pre-existent in redis)
                if outdated_hijack:
                    self.mark_outdated(
                        monitor_event["hij_key"], redis_hijack_key
                    )
            except Exception:
                log.exception("exception")
        elif (
            is_hijack
            and "hij_key" in monitor_event
            and monitor_event["initial_redis_hijack_key"]
            != monitor_event["final_redis_hijack_key"]
        ):
            try:
                outdated_hijack = self.redis.get(
                    monitor_event["initial_redis_hijack_key"]
                )
                # outdated hijack, but still a hijack; need key change
                purge_redis_eph_pers_keys(
                    self.redis,
                    monitor_event["initial_redis_hijack_key"],
                    monitor_event["hij_key"],
                )
                # mark in DB only if it is the first time this hijack was purged (pre-existent in redis)
                if outdated_hijack:
                    self.mark_outdated(
                        monitor_event["hij_key"],
                        monitor_event["initial_redis_hijack_key"],
                    )
            except Exception:
                log.exception("exception")
        elif not is_hijack:
            # benign update with no prior hijack key
            self.gen_implicit_withdrawal(monitor_event)
        # every announcement is marked as handled, hijack or not
        self.mark_handled(raw)

        if outdated_hijack:
            # log the outdated hijack (mail + hijack loggers) with its
            # community annotation, tagging it as "outdated"
            try:
                outdated_hijack = json.loads(outdated_hijack)
                outdated_hijack["end_tag"] = "outdated"
                mail_log.info(
                    "{}".format(
                        json.dumps(
                            hijack_log_field_formatter(outdated_hijack),
                            indent=4,
                        )
                    ),
                    extra={
                        "community_annotation": outdated_hijack.get(
                            "community_annotation", "NA"
                        )
                    },
                )
                hij_log.info(
                    "{}".format(
                        json.dumps(hijack_log_field_formatter(outdated_hijack))
                    ),
                    extra={
                        "community_annotation": outdated_hijack.get(
                            "community_annotation", "NA"
                        )
                    },
                )
            except Exception:
                log.exception("exception")

    elif monitor_event["type"] == "W":
        # forward withdrawals to the update exchange for further handling
        self.producer.publish(
            {
                "prefix": monitor_event["prefix"],
                "peer_asn": monitor_event["peer_asn"],
                "timestamp": monitor_event["timestamp"],
                "key": monitor_event["key"],
            },
            exchange=self.update_exchange,
            routing_key="withdraw",
            priority=0,
            serializer="ujson",
        )
def commit_hijack(
    self, monitor_event: Dict, hijacker: int, hij_dimensions: List[str]
) -> NoReturn:
    """
    Commit new or update an existing hijack to the database.
    It uses redis server to store ongoing hijacks information
    to not stress the db.

    Access to the per-hijack redis entry is serialized with a
    token-list semaphore (blpop/lpush), so concurrent detectors do not
    clobber each other's updates.

    :param monitor_event: the triggering BGP update (dict)
    :param hijacker: suspected hijacker ASN
    :param hij_dimensions: [prefix, path, dplane, policy] verdicts
    """
    hij_type = "|".join(hij_dimensions)
    redis_hijack_key = redis_key(monitor_event["prefix"], hijacker, hij_type)

    # remember the final key so the caller can detect key changes
    # (outdated-hijack handling)
    if "hij_key" in monitor_event:
        monitor_event["final_redis_hijack_key"] = redis_hijack_key

    hijack_value = {
        "prefix": monitor_event["prefix"],
        "hijack_as": hijacker,
        "type": hij_type,
        "time_started": monitor_event["timestamp"],
        "time_last": monitor_event["timestamp"],
        "peers_seen": {monitor_event["peer_asn"]},
        "monitor_keys": {monitor_event["key"]},
        "configured_prefix": monitor_event["matched_prefix"],
        "timestamp_of_config": self.timestamp,
        "end_tag": None,
        "outdated_parent": None,
        "rpki_status": "NA",
    }

    # optionally resolve the RPKI validation status of (origin, prefix),
    # cached in redis
    if (
        RPKI_VALIDATOR_ENABLED == "true"
        and self.rtrmanager
        and monitor_event["path"]
    ):
        try:
            asn = monitor_event["path"][-1]
            if "/" in monitor_event["prefix"]:
                network, netmask = monitor_event["prefix"].split("/")
            else:
                # no mask in the prefix: treat as a host route
                # (/32 or /128)
                ip_version = get_ip_version(monitor_event["prefix"])
                network = monitor_event["prefix"]
                netmask = 32
                if ip_version == "v6":
                    netmask = 128
            redis_rpki_asn_prefix_key = "rpki_as{}_p{}".format(
                asn, monitor_event["prefix"]
            )
            redis_rpki_status = self.redis.get(redis_rpki_asn_prefix_key)
            if not redis_rpki_status:
                rpki_status = get_rpki_val_result(
                    self.rtrmanager, asn, network, int(netmask)
                )
            else:
                rpki_status = redis_rpki_status.decode()
            hijack_value["rpki_status"] = rpki_status
            # the default refresh interval for the RPKI RTR manager is 3600 seconds
            self.redis.set(redis_rpki_asn_prefix_key, rpki_status, ex=3600)
        except Exception:
            log.exception("exception")

    # if this is a re-check whose hijack key changed, link back to the
    # outdated parent hijack
    if (
        "hij_key" in monitor_event
        and monitor_event["initial_redis_hijack_key"]
        != monitor_event["final_redis_hijack_key"]
    ):
        hijack_value["outdated_parent"] = monitor_event["hij_key"]

    # identify the number of infected ases
    hijack_value["asns_inf"] = set()
    if hij_dimensions[1] in {"0", "1"}:
        # type-0/1 path hijack: everything before the last 1/2 hops
        hijack_value["asns_inf"] = set(
            monitor_event["path"][: -(int(hij_dimensions[1]) + 1)]
        )
    elif hij_dimensions[3] == "L":
        hijack_value["asns_inf"] = set(monitor_event["path"][:-2])
    # assume the worst-case scenario of a type-2 hijack
    elif len(monitor_event["path"]) > 2:
        hijack_value["asns_inf"] = set(monitor_event["path"][:-3])

    # make the following operation atomic using blpop (blocking)
    # first, make sure that the semaphore is initialized
    if self.redis.getset("{}token_active".format(redis_hijack_key), 1) != b"1":
        redis_pipeline = self.redis.pipeline()
        redis_pipeline.lpush("{}token".format(redis_hijack_key), "token")
        # lock, by extracting the token (other processes that access
        # it at the same time will be blocked)
        # attention: it is important that this command is batched in the
        # pipeline since the db may async delete
        # the token
        redis_pipeline.blpop("{}token".format(redis_hijack_key))
        redis_pipeline.execute()
    else:
        # lock, by extracting the token (other processes that access it
        # at the same time will be blocked)
        token = self.redis.blpop("{}token".format(redis_hijack_key), timeout=60)
        # if timeout after 60 seconds, return without hijack alert
        # since this means that sth has been purged in the meanwhile (e.g., due to outdated hijack
        # in another instance; a detector cannot be stuck for a whole minute in a single hijack BGP update)
        if not token:
            log.info(
                "Monitor event {} encountered redis token timeout and will be cleared as benign for hijack {}".format(
                    str(monitor_event), redis_hijack_key
                )
            )
            return

    # proceed now that we have clearance
    redis_pipeline = self.redis.pipeline()
    try:
        result = self.redis.get(redis_hijack_key)
        if result:
            # merge this update into the already-ongoing hijack entry
            result = json.loads(result)
            result["time_started"] = min(
                result["time_started"], hijack_value["time_started"]
            )
            result["time_last"] = max(
                result["time_last"], hijack_value["time_last"]
            )
            result["peers_seen"] = set(result["peers_seen"])
            result["peers_seen"].update(hijack_value["peers_seen"])
            result["asns_inf"] = set(result["asns_inf"])
            result["asns_inf"].update(hijack_value["asns_inf"])
            # no update since db already knows!
            result["monitor_keys"] = hijack_value["monitor_keys"]
            self.comm_annotate_hijack(monitor_event, result)
            result["outdated_parent"] = hijack_value["outdated_parent"]
            result["bgpupdate_keys"] = set(result["bgpupdate_keys"])
            result["bgpupdate_keys"].add(monitor_event["key"])
            result["rpki_status"] = hijack_value["rpki_status"]
        else:
            # brand-new hijack: assign a stable key and mark it persistent
            hijack_value["time_detected"] = time.time()
            hijack_value["key"] = get_hash(
                [
                    monitor_event["prefix"],
                    hijacker,
                    hij_type,
                    "{0:.6f}".format(hijack_value["time_detected"]),
                ]
            )
            hijack_value["bgpupdate_keys"] = {monitor_event["key"]}
            redis_pipeline.sadd("persistent-keys", hijack_value["key"])
            result = hijack_value
            self.comm_annotate_hijack(monitor_event, result)
            # mail alert only on first detection
            mail_log.info(
                "{}".format(
                    json.dumps(hijack_log_field_formatter(result), indent=4)
                ),
                extra={
                    "community_annotation": result.get(
                        "community_annotation", "NA"
                    )
                },
            )
        redis_pipeline.set(redis_hijack_key, json.dumps(result))

        # store the origin, neighbor combination for this hijack BGP update
        origin = None
        neighbor = None
        if monitor_event["path"]:
            origin = monitor_event["path"][-1]
        if len(monitor_event["path"]) > 1:
            neighbor = monitor_event["path"][-2]
        redis_pipeline.sadd(
            "hij_orig_neighb_{}".format(redis_hijack_key),
            "{}_{}".format(origin, neighbor),
        )

        # store the prefix and peer ASN for this hijack BGP update
        redis_pipeline.sadd(
            "prefix_{}_peer_{}_hijacks".format(
                monitor_event["prefix"], monitor_event["peer_asn"]
            ),
            redis_hijack_key,
        )
        redis_pipeline.sadd(
            "hijack_{}_prefixes_peers".format(redis_hijack_key),
            "{}_{}".format(monitor_event["prefix"], monitor_event["peer_asn"]),
        )
    except Exception:
        log.exception("exception")
    finally:
        # execute whatever has been accumulated in redis till now
        redis_pipeline.execute()
        # publish hijack
        self.publish_hijack_fun(result, redis_hijack_key)

        hij_log.info(
            "{}".format(json.dumps(hijack_log_field_formatter(result))),
            extra={
                "community_annotation": result.get("community_annotation", "NA")
            },
        )

        # unlock, by pushing back the token (at most one other process
        # waiting will be unlocked)
        redis_pipeline = self.redis.pipeline()
        redis_pipeline.set("{}token_active".format(redis_hijack_key), 1)
        redis_pipeline.lpush("{}token".format(redis_hijack_key), "token")
        redis_pipeline.execute()
def commit_hijack(self, monitor_event: Dict, hijacker: int,
                  hij_dimensions: List[str]) -> NoReturn:
    """
    Commit new or update an existing hijack to the database.
    It uses redis server to store ongoing hijacks information
    to not stress the db.

    Legacy variant: serializes hijack state with YAML and publishes to
    the hijack exchanges with the 'yaml' serializer. If the event
    carries a 'hij_key' (re-check of a known hijack), only the final
    redis key is recorded and no commit is performed.
    """
    hij_type = "|".join(hij_dimensions)
    redis_hijack_key = redis_key(monitor_event["prefix"], hijacker, hij_type)

    if "hij_key" in monitor_event:
        # re-check path: record the (possibly changed) key and stop
        monitor_event["final_redis_hijack_key"] = redis_hijack_key
        return

    hijack_value = {
        "prefix": monitor_event["prefix"],
        "hijack_as": hijacker,
        "type": hij_type,
        "time_started": monitor_event["timestamp"],
        "time_last": monitor_event["timestamp"],
        "peers_seen": {monitor_event["peer_asn"]},
        "monitor_keys": {monitor_event["key"]},
        "configured_prefix": monitor_event["matched_prefix"],
        "timestamp_of_config": self.timestamp,
    }

    # identify the number of infected ases
    hijack_value["asns_inf"] = set()
    if hij_dimensions[1] in {"0", "1"}:
        # type-0/1 path hijack: everything before the last 1/2 hops
        hijack_value["asns_inf"] = set(
            monitor_event["path"][:-(int(hij_dimensions[1]) + 1)])
    elif hij_dimensions[3] == "L":
        hijack_value["asns_inf"] = set(monitor_event["path"][:-2])
    # assume the worst-case scenario of a type-2 hijack
    elif len(monitor_event["path"]) > 2:
        hijack_value["asns_inf"] = set(monitor_event["path"][:-3])

    # make the following operation atomic using blpop (blocking)
    # first, make sure that the semaphore is initialized
    if self.redis.getset("{}token_active".format(redis_hijack_key),
                         1) != b"1":
        redis_pipeline = self.redis.pipeline()
        redis_pipeline.lpush("{}token".format(redis_hijack_key), "token")
        # lock, by extracting the token (other processes that access
        # it at the same time will be blocked)
        # attention: it is important that this command is batched in the
        # pipeline since the db may async delete
        # the token
        redis_pipeline.blpop("{}token".format(redis_hijack_key))
        redis_pipeline.execute()
    else:
        # lock, by extracting the token (other processes that access it
        # at the same time will be blocked)
        token = self.redis.blpop("{}token".format(redis_hijack_key),
                                 timeout=60)
        # if timeout after 60 seconds, return without hijack alert
        # since this means that sth has been purged in the meanwhile (e.g., due to outdated hijack
        # in another instance; a detector cannot be stuck for a whole minute in a single hijack BGP update)
        if not token:
            log.info(
                "Monitor event {} encountered redis token timeout and will be cleared as benign for hijack {}"
                .format(str(monitor_event), redis_hijack_key))
            return

    # proceed now that we have clearance
    redis_pipeline = self.redis.pipeline()
    try:
        result = self.redis.get(redis_hijack_key)
        if result:
            # merge this update into the already-ongoing hijack entry
            result = yaml.safe_load(result)
            result["time_started"] = min(result["time_started"],
                                         hijack_value["time_started"])
            result["time_last"] = max(result["time_last"],
                                      hijack_value["time_last"])
            result["peers_seen"].update(hijack_value["peers_seen"])
            result["asns_inf"].update(hijack_value["asns_inf"])
            # no update since db already knows!
            result["monitor_keys"] = hijack_value["monitor_keys"]
            self.comm_annotate_hijack(monitor_event, result)
        else:
            # brand-new hijack: assign a stable key and mark it persistent
            hijack_value["time_detected"] = time.time()
            hijack_value["key"] = get_hash([
                monitor_event["prefix"],
                hijacker,
                hij_type,
                "{0:.6f}".format(hijack_value["time_detected"]),
            ])
            redis_pipeline.sadd("persistent-keys", hijack_value["key"])
            result = hijack_value
            self.comm_annotate_hijack(monitor_event, result)
            # mail alert only on first detection
            mail_log.info(
                "{}".format(
                    json.dumps(result, indent=4, cls=SetEncoder)),
                extra={
                    "community_annotation":
                    result.get("community_annotation", "NA")
                },
            )
        redis_pipeline.set(redis_hijack_key, yaml.dump(result))

        # store the origin, neighbor combination for this hijack BGP update
        origin = None
        neighbor = None
        if monitor_event["path"]:
            origin = monitor_event["path"][-1]
        if len(monitor_event["path"]) > 1:
            neighbor = monitor_event["path"][-2]
        redis_pipeline.sadd(
            "hij_orig_neighb_{}".format(redis_hijack_key),
            "{}_{}".format(origin, neighbor),
        )

        # store the prefix and peer ASN for this hijack BGP update
        redis_pipeline.sadd(
            "prefix_{}_peer_{}_hijacks".format(
                monitor_event["prefix"], monitor_event["peer_asn"]),
            redis_hijack_key,
        )
        redis_pipeline.sadd(
            "hijack_{}_prefixes_peers".format(redis_hijack_key),
            "{}_{}".format(monitor_event["prefix"],
                           monitor_event["peer_asn"]),
        )
    except Exception:
        log.exception("exception")
    finally:
        # unlock, by pushing back the token (at most one other process
        # waiting will be unlocked); this also flushes the batched
        # writes above
        redis_pipeline.set("{}token_active".format(redis_hijack_key), 1)
        redis_pipeline.lpush("{}token".format(redis_hijack_key), "token")
        redis_pipeline.execute()

    # publish the (new or updated) hijack downstream
    self.producer.publish(
        result,
        exchange=self.hijack_exchange,
        routing_key="update",
        serializer="yaml",
        priority=0,
    )
    self.producer.publish(
        result,
        exchange=self.hijack_hashing,
        routing_key=redis_hijack_key,
        serializer="yaml",
        priority=0,
    )
    hij_log.info(
        "{}".format(json.dumps(result, indent=4, cls=SetEncoder)),
        extra={
            "community_annotation": result.get("community_annotation", "NA")
        },
    )
def handle_bgp_update(self, message: Dict) -> NoReturn:
    """
    Callback function that runs the main logic of detecting
    hijacks for every bgp update.

    Legacy variant: uses a radix prefix tree (search_best) and JSON
    message payloads; withdrawals are forwarded without a serializer
    argument.
    """
    # log.debug('{}'.format(message))
    if isinstance(message, dict):
        # already-parsed event (e.g., internal re-check)
        monitor_event = message
    else:
        monitor_event = json.loads(message.payload)
        monitor_event["path"] = monitor_event["as_path"]
        # rebuild a POSIX timestamp from the digit groups of the
        # textual timestamp
        monitor_event["timestamp"] = datetime(
            *map(int, re.findall(
                r"\d+", monitor_event["timestamp"]))).timestamp()

    # keep a (shallow) copy of the event as received, for mark_handled
    raw = monitor_event.copy()

    # mark the initial redis hijack key since it may change upon
    # outdated checks
    if "hij_key" in monitor_event:
        monitor_event["initial_redis_hijack_key"] = redis_key(
            monitor_event["prefix"],
            monitor_event["hijack_as"],
            monitor_event["hij_type"],
        )

    is_hijack = False

    if monitor_event["type"] == "A":
        monitor_event["path"] = Detection.Worker.__clean_as_path(
            monitor_event["path"])
        prefix_node = self.prefix_tree.search_best(
            monitor_event["prefix"])

        if prefix_node:
            monitor_event["matched_prefix"] = prefix_node.prefix

            try:
                path_hijacker = -1
                pol_hijacker = -1
                hij_dimensions = [
                    "-",
                    "-",
                    "-",
                    "-",
                ]  # prefix, path, dplane, policy
                hij_dimension_index = 0
                # each generator yields the checker functions for one
                # dimension; within a dimension the first non-"-"
                # verdict wins
                for func_dim in self.__hijack_dimension_checker_gen():
                    if hij_dimension_index == 0:
                        # prefix dimension
                        for func_pref in func_dim():
                            hij_dimensions[
                                hij_dimension_index] = func_pref(
                                    monitor_event, prefix_node)
                            if hij_dimensions[
                                    hij_dimension_index] != "-":
                                break
                    elif hij_dimension_index == 1:
                        # path type dimension (also yields the suspected
                        # hijacker AS)
                        for func_path in func_dim(
                                len(monitor_event["path"])):
                            (
                                path_hijacker,
                                hij_dimensions[hij_dimension_index],
                            ) = func_path(monitor_event, prefix_node)
                            if hij_dimensions[
                                    hij_dimension_index] != "-":
                                break
                    elif hij_dimension_index == 2:
                        # data plane dimension
                        for func_dplane in func_dim():
                            hij_dimensions[
                                hij_dimension_index] = func_dplane(
                                    monitor_event, prefix_node)
                            if hij_dimensions[
                                    hij_dimension_index] != "-":
                                break
                    elif hij_dimension_index == 3:
                        # policy dimension (also yields the suspected
                        # hijacker AS)
                        for func_pol in func_dim(
                                len(monitor_event["path"])):
                            (
                                pol_hijacker,
                                hij_dimensions[hij_dimension_index],
                            ) = func_pol(monitor_event, prefix_node)
                            if hij_dimensions[
                                    hij_dimension_index] != "-":
                                break
                    hij_dimension_index += 1
                # check if dimension combination in hijack combinations
                # and commit hijack
                if hij_dimensions in HIJACK_DIM_COMBINATIONS:
                    is_hijack = True
                    # show pol hijacker only if the path hijacker is uncertain
                    hijacker = path_hijacker
                    if path_hijacker == -1 and pol_hijacker != -1:
                        hijacker = pol_hijacker
                    self.commit_hijack(monitor_event, hijacker,
                                       hij_dimensions)
            except Exception:
                log.exception("exception")

        # re-checked hijack that is now benign, or still a hijack but
        # under a different key: purge its old redis state and mark it
        # outdated in the DB
        if (not is_hijack and "hij_key" in monitor_event) or (
                is_hijack and "hij_key" in monitor_event
                and monitor_event["initial_redis_hijack_key"] !=
                monitor_event["final_redis_hijack_key"]):
            redis_hijack_key = redis_key(
                monitor_event["prefix"],
                monitor_event["hijack_as"],
                monitor_event["hij_type"],
            )
            purge_redis_eph_pers_keys(self.redis, redis_hijack_key,
                                      monitor_event["hij_key"])
            self.mark_outdated(monitor_event["hij_key"], redis_hijack_key)
        elif not is_hijack:
            # benign update with no prior hijack key
            self.gen_implicit_withdrawal(monitor_event)
        # every announcement is marked as handled, hijack or not
        self.mark_handled(raw)

    elif monitor_event["type"] == "W":
        # forward withdrawals to the update exchange for further handling
        self.producer.publish(
            {
                "prefix": monitor_event["prefix"],
                "peer_asn": monitor_event["peer_asn"],
                "timestamp": monitor_event["timestamp"],
                "key": monitor_event["key"],
            },
            exchange=self.update_exchange,
            routing_key="withdraw",
            priority=0,
        )
def commit_hijack(self, monitor_event: Dict, hijacker: int,
                  hij_type: str) -> NoReturn:
    """
    Commit new or update an existing hijack to the database.
    It uses redis server to store ongoing hijacks information
    to not stress the db.

    Oldest variant: hij_type is a single code ('Q' squatting,
    'S' sub-prefix, or a numeric exact-prefix type) and hijack state is
    serialized with pickle.
    """
    redis_hijack_key = redis_key(monitor_event['prefix'], hijacker,
                                 hij_type)

    if 'hij_key' in monitor_event:
        # re-check path: record the (possibly changed) key and stop
        monitor_event['final_redis_hijack_key'] = redis_hijack_key
        return

    hijack_value = {
        'prefix': monitor_event['prefix'],
        'hijack_as': hijacker,
        'type': hij_type,
        'time_started': monitor_event['timestamp'],
        'time_last': monitor_event['timestamp'],
        'peers_seen': {monitor_event['peer_asn']},
        'monitor_keys': {monitor_event['key']},
        'configured_prefix': monitor_event['matched_prefix'],
        'timestamp_of_config': self.timestamp
    }

    hijack_value['asns_inf'] = set()
    # for squatting, all ASes except the origin are considered infected
    if hij_type == 'Q':
        if len(monitor_event['path']) > 0:
            hijack_value['asns_inf'] = set(monitor_event['path'][:-1])
    # for sub-prefix hijacks, the infection depends on whether the hijacker is the origin/neighbor/sth else
    elif hij_type == 'S':
        if len(monitor_event['path']) > 1:
            if hijacker == monitor_event['path'][-1]:
                hijack_value['asns_inf'] = set(
                    monitor_event['path'][:-1])
            elif hijacker == monitor_event['path'][-2]:
                hijack_value['asns_inf'] = set(
                    monitor_event['path'][:-2])
            else:
                # assume the hijacker does a Type-2
                if len(monitor_event['path']) > 2:
                    hijack_value['asns_inf'] = set(
                        monitor_event['path'][:-3])
    # for exact-prefix type-0/type-1 hijacks, the pollution depends on the type
    else:
        hijack_value['asns_inf'] = set(
            monitor_event['path'][:-(int(hij_type) + 1)])

    # make the following operation atomic using blpop (blocking)
    # first, make sure that the semaphore is initialized
    if self.redis.getset('{}token_active'.format(redis_hijack_key),
                         1) != b'1':
        redis_pipeline = self.redis.pipeline()
        redis_pipeline.lpush('{}token'.format(redis_hijack_key), 'token')
        # lock, by extracting the token (other processes that access it at
        # the same time will be blocked)
        # attention: it is important that this command is batched in the pipeline since the db may async delete
        # the token
        redis_pipeline.blpop('{}token'.format(redis_hijack_key))
        redis_pipeline.execute()
    else:
        # lock, by extracting the token (other processes that access it at the same time will be blocked)
        # NOTE(review): no timeout here — unlike later variants, this
        # blpop can block indefinitely if the token is never returned
        self.redis.blpop('{}token'.format(redis_hijack_key))

    # proceed now that we have clearance
    redis_pipeline = self.redis.pipeline()
    try:
        result = self.redis.get(redis_hijack_key)
        if result is not None:
            # HACK/security note: pickle.loads on redis-stored data is
            # only safe while redis is fully trusted and private
            result = pickle.loads(result)
            # merge this update into the already-ongoing hijack entry
            result['time_started'] = min(result['time_started'],
                                         hijack_value['time_started'])
            result['time_last'] = max(result['time_last'],
                                      hijack_value['time_last'])
            result['peers_seen'].update(hijack_value['peers_seen'])
            result['asns_inf'].update(hijack_value['asns_inf'])
            # no update since db already knows!
            result['monitor_keys'] = hijack_value['monitor_keys']
        else:
            # brand-new hijack: assign a stable key and mark it persistent
            hijack_value['time_detected'] = time.time()
            hijack_value['key'] = hashlib.md5(
                pickle.dumps([
                    monitor_event['prefix'], hijacker, hij_type,
                    hijack_value['time_detected']
                ])).hexdigest()
            redis_pipeline.sadd('persistent-keys', hijack_value['key'])
            result = hijack_value
            # mail alert only on first detection
            mail_log.info('{}'.format(result))
        redis_pipeline.set(redis_hijack_key, pickle.dumps(result))
    except Exception:
        log.exception('exception')
    finally:
        # unlock, by pushing back the token (at most one other process waiting will be unlocked)
        redis_pipeline.set('{}token_active'.format(redis_hijack_key), 1)
        redis_pipeline.lpush('{}token'.format(redis_hijack_key), 'token')
        redis_pipeline.execute()

    # publish the (new or updated) hijack downstream
    self.producer.publish(result,
                          exchange=self.hijack_exchange,
                          routing_key='update',
                          serializer='pickle',
                          priority=0)
    hij_log.info('{}'.format(result))
def handle_bgp_update(self, message: Dict) -> NoReturn:
    """
    Callback function that runs the main logic of detecting
    hijacks for every bgp update.

    Oldest variant: deduplicates updates via a short-lived redis key,
    runs the flat detection-function generator, and forwards
    withdrawals to the update exchange.
    """
    # log.debug('{}'.format(message))
    if isinstance(message, dict):
        # already-parsed event (e.g., internal re-check)
        monitor_event = message
    else:
        monitor_event = json.loads(message.payload)
        monitor_event['path'] = monitor_event['as_path']
        # rebuild a POSIX timestamp from the digit groups of the textual
        # timestamp; raw string so '\d' is a regex class, not an
        # (invalid) string escape
        monitor_event['timestamp'] = datetime(
            *map(int, re.findall(
                r'\d+', monitor_event['timestamp']))).timestamp()

    # process only unseen update keys, unless this is a hijack re-check
    # (hij_key present)
    if not self.redis.exists(
            monitor_event['key']) or 'hij_key' in monitor_event:
        # keep a (shallow) copy of the event as received, for mark_handled
        raw = monitor_event.copy()

        # mark the initial redis hijack key since it may change upon outdated checks
        if 'hij_key' in monitor_event:
            monitor_event['initial_redis_hijack_key'] = redis_key(
                monitor_event['prefix'], monitor_event['hijack_as'],
                monitor_event['hij_type'])

        is_hijack = False
        # ignore withdrawals for now
        if monitor_event['type'] == 'A':
            monitor_event['path'] = Detection.Worker.__clean_as_path(
                monitor_event['path'])
            prefix_node = self.prefix_tree.search_best(
                monitor_event['prefix'])

            if prefix_node is not None:
                monitor_event['matched_prefix'] = prefix_node.prefix

                try:
                    # run the detection checks until one fires
                    for func in self.__detection_generator(
                            len(monitor_event['path'])):
                        if func(monitor_event, prefix_node):
                            is_hijack = True
                            break
                except Exception:
                    log.exception('exception')

            # re-checked hijack that is now benign, or still a hijack but
            # under a different key: purge old redis state, mark outdated
            if ((not is_hijack and 'hij_key' in monitor_event)
                    or (is_hijack and 'hij_key' in monitor_event
                        and monitor_event['initial_redis_hijack_key'] !=
                        monitor_event['final_redis_hijack_key'])):
                redis_hijack_key = redis_key(
                    monitor_event['prefix'], monitor_event['hijack_as'],
                    monitor_event['hij_type'])
                purge_redis_eph_pers_keys(self.redis, redis_hijack_key,
                                          monitor_event['hij_key'])
                self.mark_outdated(monitor_event['hij_key'],
                                   redis_hijack_key)
            elif not is_hijack:
                self.mark_handled(raw)
        elif monitor_event['type'] == 'W':
            # forward withdrawals to the update exchange
            self.producer.publish(
                {
                    'prefix': monitor_event['prefix'],
                    'peer_asn': monitor_event['peer_asn'],
                    'timestamp': monitor_event['timestamp'],
                    'key': monitor_event['key']
                },
                exchange=self.update_exchange,
                routing_key='withdraw',
                priority=0)

        # remember this update key for one hour to avoid re-processing
        self.redis.set(monitor_event['key'], '', ex=60 * 60)
    else:
        log.debug('already handled {}'.format(monitor_event['key']))
def translate_learn_rule_msg_to_dicts(self, raw):
    """
    Translates a learn rule message payload (raw) into ARTEMIS-compatible
    dictionaries.

    :param raw: dict of the form {
            "key": <str>,
            "prefix": <str>,
            "type": <str>,
            "hijack_as": <int>,
        }
    :return: (<str>rule_prefix, <list><int>rule_asns, <list><dict>rules),
        or (None, None, None) if anything goes wrong (including failed
        sanity assertions below, which are caught by the except clause)
    """
    # initialize dictionaries and lists
    rule_prefix = {}
    rule_asns = {}
    rules = []

    try:
        # retrieve (origin, neighbor) combinations from redis
        redis_hijack_key = redis_key(raw["prefix"], raw["hijack_as"],
                                     raw["type"])
        hij_orig_neighb_set = "hij_orig_neighb_{}".format(
            redis_hijack_key)
        orig_to_neighb = {}
        neighb_to_origs = {}
        asns = set()
        if self.redis.exists(hij_orig_neighb_set):
            for element in self.redis.sscan_iter(hij_orig_neighb_set):
                # members are "<origin>_<neighbor>"; either side may be
                # the literal string "None"
                (origin_str, neighbor_str) = element.decode().split("_")
                origin = None
                if origin_str != "None":
                    origin = int(origin_str)
                neighbor = None
                if neighbor_str != "None":
                    neighbor = int(neighbor_str)
                if origin is not None:
                    asns.add(origin)
                    if origin not in orig_to_neighb:
                        orig_to_neighb[origin] = set()
                    if neighbor is not None:
                        asns.add(neighbor)
                        orig_to_neighb[origin].add(neighbor)
                        if neighbor not in neighb_to_origs:
                            neighb_to_origs[neighbor] = set()
                        neighb_to_origs[neighbor].add(origin)

        # learned rule prefix
        rule_prefix = {
            raw["prefix"]:
            "LEARNED_H_{}_P_{}".format(
                raw["key"],
                raw["prefix"].replace("/", "_").replace(".", "_").replace(
                    ":", "_"),
            )
        }

        # learned rule asns
        rule_asns = {}
        for asn in sorted(asns):
            rule_asns[asn] = "LEARNED_H_{}_AS_{}".format(raw["key"], asn)

        # learned rule(s); note the character classes are [ES]
        # ('E' or 'S'), not [E|S] which would also match a literal '|'
        if re.match(r"^[ES]\|0.*", raw["type"]):
            # exact-prefix type-0: the hijacker is the (single) origin
            assert len(orig_to_neighb) == 1
            assert raw["hijack_as"] in orig_to_neighb
            learned_rule = {
                "prefixes": [rule_prefix[raw["prefix"]]],
                "origin_asns": [rule_asns[raw["hijack_as"]]],
                "neighbors": [
                    rule_asns[asn]
                    for asn in sorted(orig_to_neighb[raw["hijack_as"]])
                ],
                "mitigation": "manual",
            }
            rules.append(learned_rule)
        elif re.match(r"^[ES]\|1.*", raw["type"]):
            # exact-prefix type-1: the hijacker is the (single) neighbor
            assert len(neighb_to_origs) == 1
            assert raw["hijack_as"] in neighb_to_origs
            learned_rule = {
                "prefixes": [rule_prefix[raw["prefix"]]],
                "origin_asns": [
                    rule_asns[asn]
                    for asn in sorted(neighb_to_origs[raw["hijack_as"]])
                ],
                "neighbors": [rule_asns[raw["hijack_as"]]],
                "mitigation": "manual",
            }
            rules.append(learned_rule)
        elif re.match(r"^[ES]\|-.*", raw["type"]) or re.match(
                r"^Q\|0.*", raw["type"]):
            # unknown path position or squatting: one rule per seen origin
            for origin in sorted(orig_to_neighb):
                learned_rule = {
                    "prefixes": [rule_prefix[raw["prefix"]]],
                    "origin_asns": [rule_asns[origin]],
                    "neighbors": [
                        rule_asns[asn]
                        for asn in sorted(orig_to_neighb[origin])
                    ],
                    "mitigation": "manual",
                }
                rules.append(learned_rule)
    except Exception:
        log.exception("{}".format(raw))
        return (None, None, None)

    return (rule_prefix, rule_asns, rules)