def check_socket_monitor_connection():
    """Check that the monitoring socket accepts connections and reports a running status."""
    filter = Filter(filter_name="logs")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()

    try:
        with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
            s.connect(filter.monitor)
            data = s.recv(4096).decode()

        if RESP_MON_STATUS_RUNNING not in data:
            logging.error("check_socket_monitor_connection: Wrong response; got {}".format(data))
            return False
    except Exception as e:
        logging.error("check_socket_monitor_connection: Error connecting to socket: {}".format(e))
        return False

    filter.stop()
    return True
def check_pid_file():
    """Check that the filter writes a valid PID file and removes it when stopped."""
    filter = Filter(filter_name="logs")
    pid = -1

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()

    try:
        with open(filter.pid) as f:
            pid = int(f.readline())
    except Exception as e:
        logging.error("check_pid: Unable to read pid file: {}".format(e))
        return False

    try:
        kill(pid, 0)
    except OSError as e:
        logging.error("check_pid: Process {} not running: {}".format(pid, e))
        return False

    filter.stop()

    if access(filter.pid, F_OK):
        logging.error("check_pid: PID file not deleted")
        return False

    return True
def check_start_invalid_threshold_num():
    """Check that the filter refuses to start when the threshold is not a number."""
    filter = Filter(filter_name="logs", thresold="Kenobi")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()
    sleep(0.5)

    if filter.check_start():
        logging.error("check_start_invalid_threshold_num: Process started when threshold was invalid")
        filter.stop()
        return False

    return True
def check_start_invalid_cache_num():
    """Check that the filter refuses to start when the cache size is not a number."""
    filter = Filter(filter_name="logs", cache_size="General")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()
    sleep(0.5)

    if filter.check_start():
        logging.error("check_start_invalid_cache_num: Process started when cache size was invalid")
        filter.stop()
        return False

    return True
def check_start_invalid_thread_num():
    """Check that the filter refuses to start when the thread number is not a number."""
    filter = Filter(filter_name="logs", nb_threads="HelloThere")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()
    sleep(0.5)

    if filter.check_start():
        logging.error("check_start_invalid_thread_num: Process started when thread number was invalid")
        filter.stop()
        return False

    return True
def check_start_outbound_threshold_num():
    """Check that the filter refuses to start when the threshold is out of bounds."""
    filter = Filter(filter_name="logs", thresold="459230781640628620899862803482")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()
    sleep(0.5)

    if filter.check_start():
        logging.error("check_start_outbound_threshold_num: Process started when threshold was out of bounds")
        filter.stop()
        return False

    return True
def check_start_outbound_cache_num():
    """Check that the filter refuses to start when the cache size is out of bounds."""
    filter = Filter(filter_name="logs", cache_size="950288419716939937510582097494")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()
    sleep(0.5)

    if filter.check_start():
        logging.error("check_start_outbound_cache_num: Process started when cache size was out of bounds")
        filter.stop()
        return False

    return True
def check_start_outbound_thread_num():
    """Check that the filter refuses to start when the thread number is out of bounds."""
    filter = Filter(filter_name="logs", nb_threads="314159265358979323846264338327")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()
    sleep(0.5)

    if filter.check_start():
        logging.error("check_start_outbound_thread_num: Process started when thread number was out of bounds")
        filter.stop()
        return False

    return True
def check_socket_connection():
    """Check that the filter socket accepts a Darwin API call."""
    filter = Filter(filter_name="logs")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()

    try:
        api = DarwinApi(socket_path=filter.socket, socket_type="unix")
        api.call("test\n", filter_code="logs", response_type="back")
        api.close()
    except Exception as e:
        logging.error("check_socket_connection: Error connecting to socket: {}".format(e))
        return False

    filter.stop()
    return True
def check_socket_create_delete():
    """Check that the filter socket is created on start and deleted on stop."""
    filter = Filter(filter_name="logs")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()

    if not access(filter.socket, F_OK):
        logging.error("check_socket_create_delete: Socket file not accessible")
        return False

    filter.stop()

    if access(filter.socket, F_OK):
        logging.error("check_socket_create_delete: Socket file not deleted")
        return False

    return True
def check_start_stop():
    """Check that the filter process starts and stops cleanly."""
    filter = Filter(filter_name="logs")

    filter.configure(FLOGS_CONFIG)
    filter.valgrind_start()

    try:
        kill(filter.process.pid, 0)
    except OSError as e:
        logging.error("check_start_stop: Process {} not running: {}".format(filter.process.pid, e))
        return False

    if filter.stop() is not True:
        return False

    return True
def fanomaly_connector_and_send_test():
    """Check that the buffer filter forwards the test data to the anomaly filter
    and that the resulting alert ends up in the Redis alert list."""
    test_name = "fanomaly_connector_and_send_test"
    ret = True

    config_test = '{{' \
                  '"redis_socket_path": "{redis_socket}",' \
                  '"alert_redis_list_name": "{redis_alert}",' \
                  '"log_file_path": "/var/log/darwin/alerts.log",' \
                  '"alert_redis_channel_name": "darwin.alerts"' \
                  '}}'.format(redis_socket=REDIS_SOCKET, redis_alert=REDIS_ALERT_LIST)

    config_buffer = '{{' \
                    '"redis_socket_path": "{redis_socket}",' \
                    '"input_format": [' \
                    '{{"name": "net_src_ip", "type": "string"}},' \
                    '{{"name": "net_dst_ip", "type": "string"}},' \
                    '{{"name": "net_dst_port", "type": "string"}},' \
                    '{{"name": "ip_proto", "type": "string"}}' \
                    '],' \
                    '"outputs": [' \
                    '{{' \
                    '"filter_type": "fanomaly",' \
                    '"filter_socket_path": "/tmp/anomaly.sock",' \
                    '"interval": 10,' \
                    '"required_log_lines": 5,' \
                    '"redis_lists": [{{' \
                    '"source": "",' \
                    '"name": "darwin_buffer_test"' \
                    '}}]' \
                    '}}' \
                    ']' \
                    '}}'.format(redis_socket=REDIS_SOCKET)

    # CONFIG
    buffer_filter = Buffer()
    buffer_filter.configure(config_buffer)

    test_filter = Filter(filter_name="anomaly", socket_path="/tmp/anomaly.sock")
    test_filter.configure(config_test)

    # START FILTERS
    if not buffer_filter.valgrind_start():
        logging.error("{}: Buffer filter did not start".format(test_name))
        return False

    if not test_filter.start():
        logging.error("{}: Anomaly filter did not start".format(test_name))
        return False

    # SEND TEST DATA
    data = buffer_filter.get_test_data()

    darwin_api = DarwinApi(socket_path=buffer_filter.socket, socket_type="unix")
    darwin_api.bulk_call(
        data,
        response_type="back",
    )

    # Wait for the buffer interval to elapse so the anomaly filter can process the data
    sleep(15)

    # GET REDIS DATA AND COMPARE
    redis_data = buffer_filter.get_internal_redis_list_data(REDIS_ALERT_LIST)
    expected_data = '"details": {"ip": "192.168.110.2","udp_nb_host": 1.000000,"udp_nb_port": 252.000000,"tcp_nb_host": 0.000000,"tcp_nb_port": 0.000000,"icmp_nb_host": 0.000000,"distance": 246.193959}'

    if len(redis_data) != 1:
        logging.error("{}: Expecting a single element list.".format(test_name))
        ret = False

    redis_data = [a.decode() for a in redis_data]

    if not redis_data or expected_data not in redis_data[0]:
        logging.error("{}: Expected this data: {} but got {} in redis".format(test_name, expected_data, redis_data))
        ret = False

    # CLEAN
    darwin_api.close()
    test_filter.clean_files()

    # Do not assign the stop results directly to ret: a successful stop would
    # overwrite a failure recorded above.
    if not buffer_filter.valgrind_stop():
        ret = False

    if not test_filter.stop():
        ret = False

    return ret