def test_agentd_state(configure_environment, test_case: list):
    """Check that wazuh-agentd generates the state file with the expected fields.

    Stops any running RemotedSimulator and the Wazuh service, applies the state
    interval from the test case (defaulting to 1), removes any stale state file,
    registers a dummy key and restarts the service. If the test case requires
    it, a RemotedSimulator is started before validating every expected output.

    Args:
        configure_environment: Fixture that applies the test configuration.
        test_case (list): Test case specification with 'input' and 'output' keys.
    """
    global remoted_server
    if remoted_server is not None:
        remoted_server.stop()

    # Stop service
    control_service('stop')

    if 'interval' in test_case['input']:
        set_state_interval(test_case['input']['interval'], internal_options)
    else:
        set_state_interval(1, internal_options)

    # Truncate ossec.log in order to watch it correctly
    truncate_file(LOG_FILE_PATH)

    # Remove state file to check if agent behavior is as expected.
    # Plain `if` statement instead of a conditional expression evaluated
    # only for its side effect.
    if os.path.exists(state_file_path):
        os.remove(state_file_path)

    # Add dummy key in order to communicate with RemotedSimulator
    add_custom_key()

    # Start service
    control_service('start')

    # Start RemotedSimulator if the test case needs it
    if 'remoted' in test_case['input'] and test_case['input']['remoted']:
        remoted_server = RemotedSimulator(protocol='tcp', mode='DUMMY_ACK',
                                          client_keys=CLIENT_KEYS_PATH)

    # Check fields for every expected output type
    for expected_output in test_case['output']:
        check_fields(expected_output)
def modify_feed(test_values, request):
    """Insert a test tag into the Debian OVAL feed and restore it on teardown."""
    tag_name, tag_value = test_values
    backup_data = file.read_xml_file(file_path=custom_debian_oval_feed_path,
                                     namespaces=vd.XML_FEED_NAMESPACES,
                                     xml_header=True)
    modified_data = insert_xml_tag(pattern=insert_pattern, tag=tag_name,
                                   value=tag_value, data=str(backup_data))
    file.write_file(file_path=custom_debian_oval_feed_path, data=modified_data)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='BUSTER')

    yield

    # Teardown: restore the original feed and leave a clean state
    file.write_file(file_path=custom_debian_oval_feed_path, data=backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    truncate_file(LOG_FILE_PATH)
def restart_logcollector(get_configuration, request):
    """Reset the log file and expose a fresh monitor, then restart the daemon."""
    control_service('stop', daemon=DAEMON_NAME)
    truncate_file(LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(LOG_FILE_PATH))
    control_service('start', daemon=DAEMON_NAME)
def file_composer(self, host, path, output_path):
    """Collect the file content of the specified path in the desired host and
    append it to the output_path file.

    Simulates the behavior of ``tail -f`` redirected to ``output_path``: every
    ``self._time_step`` seconds the remote file is fetched and any line not
    already present in the composite file is appended to it. Runs forever.

    Parameters
    ----------
    host : str
        Hostname.
    path : str
        Host file path to be collected.
    output_path : str
        Output path of the content collected from the remote host path.
    """
    # Hoist the path join: it was computed three times in the original
    tmp_file = os.path.join(self._tmp_path, output_path)
    try:
        truncate_file(tmp_file)
    except FileNotFoundError:
        pass
    logger.debug(f'Starting file composer for {host} and path: {path}. '
                 f'Composite file in {tmp_file}')
    while True:
        with FileLock(tmp_file):
            # `fd` instead of `file`, which shadowed the builtin
            with open(tmp_file, "r+") as fd:
                remote_lines = self.host_manager.get_file_content(host, path).split('\n')
                # Set membership is O(1) vs. O(n) for the original list lookup;
                # after read() the cursor is at EOF, so write() appends.
                known_lines = set(fd.read().split('\n'))
                for new_line in remote_lines:
                    if new_line == '':
                        continue
                    if new_line not in known_lines:
                        fd.write(f'{new_line}\n')
        time.sleep(self._time_step)
def test_agentd_state_config(configure_environment, test_case: list):
    """Check that wazuh-agentd applies every state-interval configuration as expected.

    Restarts the agent with the interval from the test case and then, depending
    on the case, verifies state-file existence, an expected log message, and
    whether the daemon keeps running.

    Args:
        configure_environment: Fixture that applies the test configuration.
        test_case (list): Test case specification ('interval', 'log_expect', ...).
    """
    control_service('stop', 'wazuh-agentd')

    # Truncate ossec.log in order to watch it correctly
    truncate_file(LOG_FILE_PATH)

    # Remove state file to check if agent behavior is as expected.
    # Plain `if` statement instead of a conditional expression evaluated
    # only for its side effect.
    if os.path.exists(state_file_path):
        os.remove(state_file_path)

    # Set state interval value according to test case specs
    set_state_interval(test_case['interval'], internal_options)

    control_service('start', 'wazuh-agentd')

    # Check if the test requires checking state file existence
    if 'state_file_exist' in test_case:
        if test_case['state_file_exist']:
            # Wait until the state file is dumped
            time.sleep(test_case['interval'])
        assert test_case['state_file_exist'] == os.path.exists(state_file_path)

    # Follow ossec.log to find desired messages by a callback
    wazuh_log_monitor = FileMonitor(LOG_FILE_PATH)
    wazuh_log_monitor.start(timeout=global_parameters.default_timeout,
                            callback=callbacks.get(test_case['log_expect']),
                            error_message='Event not found')
    assert wazuh_log_monitor.result()

    # Check if the test requires checking agentd status
    if 'agentd_ends' in test_case:
        assert (test_case['agentd_ends'] is not check_if_process_is_running('wazuh-agentd'))
def restart_api(get_configuration, request):
    """Reset api.log and expose a fresh monitor, then restart the Wazuh API."""
    apid_binary = os.path.join(WAZUH_PATH, 'bin', 'wazuh-apid')
    subprocess.call([apid_binary, 'stop'])
    truncate_file(API_LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(API_LOG_FILE_PATH))
    subprocess.call([apid_binary, 'start'])
def restart_modulesd(get_configuration, request):
    """Reset ossec.log and expose a fresh monitor, then restart wazuh-modulesd."""
    control_service('stop', daemon='wazuh-modulesd')
    truncate_file(LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(LOG_FILE_PATH))
    control_service('start', daemon='wazuh-modulesd')
def test_log_format(get_configuration, configure_environment):
    """Check if the Wazuh logcollector log_format field works properly.

    Ensure the Wazuh component fails in case of invalid values and works
    properly in case of valid log format values.

    Raises:
        TimeoutError: If expected callbacks are not generated.
    """
    cfg = get_configuration['metadata']
    control_service('stop', daemon=LOGCOLLECTOR_DAEMON)
    truncate_file(LOG_FILE_PATH)

    if cfg['valid_value']:
        control_service('start', daemon=LOGCOLLECTOR_DAEMON)
        check_log_format_valid(cfg)
        return

    # Invalid value: starting the daemon must fail. The failure surfaces
    # differently per platform.
    expected_exception = ValueError if sys.platform == 'win32' else sb.CalledProcessError
    with pytest.raises(expected_exception):
        control_service('start', daemon=LOGCOLLECTOR_DAEMON)
        check_log_format_invalid(cfg)
def modify_feed(test_values, request):
    """Modify the Arch Linux JSON feed by setting a test tag value."""
    new_key, new_value = test_values
    backup_data = read_json_file(custom_archlinux_json_feed_path)
    modified_data = deepcopy(backup_data)
    modified_data[0]['replace_this'] = new_value
    modified_string = json.dumps(modified_data, indent=4)
    # Replace the placeholder key as raw text, so non-string keys (lists,
    # dicts, numbers, ...) can be injected into the feed as well
    key_text = f'"{new_key}"' if isinstance(new_key, str) else str(new_key)
    modified_string = modified_string.replace('"replace_this"', key_text)
    write_file(custom_archlinux_json_feed_path, modified_string)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='ARCH')

    yield

    # Teardown: restore the original feed and leave a clean state
    write_json_file(custom_archlinux_json_feed_path, backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    file.truncate_file(LOG_FILE_PATH)
def modify_feed(test_data, custom_input, request):
    """Modify the MSU feed, setting a test field value."""
    backup_data = read_json_file(custom_msu_json_feed_path)
    feed = read_json_file(custom_msu_json_feed_path)
    vulnerability = dict(feed['vulnerabilities']['CVE-010'][0])
    vulnerability[test_data['field']] = custom_input
    feed['vulnerabilities']['CVE-010'][0] = vulnerability
    write_json_file(custom_msu_json_feed_path, feed)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='Windows10')

    yield

    # Teardown: restore the original feed and leave a clean state
    write_json_file(custom_msu_json_feed_path, backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    truncate_file(LOG_FILE_PATH)
def remove_tag_feed(request):
    """Modify the Canonical OVAL feed by removing a certain tag and loading
    the new feed configuration."""
    backup_data = file.read_xml_file(file_path=custom_canonical_oval_feed_path,
                                     namespaces=vd.XML_FEED_NAMESPACES)
    stripped_feed = replace_regex(request.param['pattern'], '', str(backup_data))
    file.write_file(file_path=custom_canonical_oval_feed_path, data=stripped_feed)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='BIONIC')

    yield request.param

    # Teardown: restore the original feed and leave a clean state
    file.write_file(file_path=custom_canonical_oval_feed_path, data=backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    file.truncate_file(LOG_FILE_PATH)
def remove_field_feed(request):
    """Modify the MSU feed by removing a certain field and loading the new
    feed configuration."""
    backup_data = read_json_file(custom_msu_json_feed_path)
    feed = read_json_file(custom_msu_json_feed_path)
    vulnerability = dict(feed['vulnerabilities']['CVE-010'][0])
    vulnerability.pop(request.param, None)
    feed['vulnerabilities']['CVE-010'][0] = vulnerability
    write_json_file(custom_msu_json_feed_path, feed)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='Windows10')

    yield request.param

    # Teardown: restore the original feed and leave a clean state
    write_json_file(custom_msu_json_feed_path, backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    truncate_file(LOG_FILE_PATH)
def override_wazuh_conf(configuration, set_password):
    """Apply a test configuration to wazuh-authd and wait until it accepts connections.

    Stops wazuh-authd, truncates the log, writes the configuration sections
    from `configuration`, resets client.keys and the password, then starts the
    daemon again and blocks until it reports it is listening on port 1515.

    Args:
        configuration (dict): Configuration template with a 'sections' key.
        set_password: Password specification forwarded to reset_password().

    Raises:
        TimeoutError: If authd does not report the listening log within 30 s.
    """
    # Stop Wazuh
    control_service('stop', daemon='wazuh-authd')
    time.sleep(1)
    check_daemon_status(running=False, daemon='wazuh-authd')
    truncate_file(LOG_FILE_PATH)

    # Configuration for testing
    test_config = set_section_wazuh_conf(configuration.get('sections'))
    # Set new configuration
    write_wazuh_conf(test_config)

    # Reset client.keys
    clean_client_keys_file()
    # Reset password
    reset_password(set_password)

    time.sleep(1)
    # Start Wazuh
    control_service('start', daemon='wazuh-authd')

    def callback_agentd_startup(line):
        """Wait until agentd has begun.

        The original placed this note as a stray no-op string statement; it
        is now the callback's docstring.
        """
        if 'Accepting connections on port 1515' in line:
            return line
        return None

    log_monitor = FileMonitor(LOG_FILE_PATH)
    log_monitor.start(timeout=30, callback=callback_agentd_startup)
    time.sleep(1)
def modify_feed(test_values, request):
    """Modify the MSU OVAL feed, setting a test field value."""
    field_name, field_value = test_values
    backup_data = read_json_file(custom_msu_json_feed_path)
    # Insert the key:value pair as a string since, otherwise, lists or
    # dictionaries could not be inserted as a key
    modified_string_data = vd.insert_data_json_feed(data=dict(backup_data),
                                                    field_name=field_name,
                                                    field_value=field_value,
                                                    append_data=None)
    write_file(custom_msu_json_feed_path, modified_string_data)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='Windows10')

    yield

    # Teardown: restore the original feed and leave a clean state
    write_json_file(custom_msu_json_feed_path, backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    truncate_file(LOG_FILE_PATH)
def wait_to_remoted_key_update(wazuh_log_monitor):
    """Allow to detect when remoted has updated its info with the client.keys.

    This is necessary for remoted to correctly recognize the agent, and to be
    able to decrypt its messages.

    The reload time is editable in internal_options.conf and defaults to 10 seconds:
    >> remoted.keyupdate_interval=10
    It is recommended to set this time to 5 or less for testing.

    Args:
        wazuh_log_monitor (FileMonitor): FileMonitor object to monitor the Wazuh log.

    Raises:
        TimeoutError: if could not find the remoted key loading log.
    """
    # Truncate first so the key-loading log matched below is guaranteed to
    # have been emitted after the agent was registered.
    file.truncate_file(tools.LOG_FILE_PATH)
    check_remoted_log_event(wazuh_log_monitor,
                            '.*rem_keyupdate_main().*Checking for keys file changes.',
                            'Could not find the remoted key loading log',
                            timeout=20)
def modify_feed(test_data, request):
    """Modify the Red Hat OVAL feed, setting a test field value."""
    backup_data = file.read_xml_file(file_path=custom_redhat_oval_feed_path,
                                     namespaces=vd.XML_FEED_NAMESPACES,
                                     xml_header=True)
    patched_feed = replace_regex(pattern=test_data['pattern'],
                                 new_value=test_data['update'],
                                 data=str(backup_data),
                                 replace_group=True)
    file.write_file(file_path=custom_redhat_oval_feed_path, data=patched_feed)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    vd.set_system(system='RHEL8')

    yield

    # Teardown: restore the original feed and leave a clean state
    file.write_file(file_path=custom_redhat_oval_feed_path, data=backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    file.truncate_file(LOG_FILE_PATH)
def extra_configuration_after_yield():
    """Teardown hook: stop the RemotedSimulator (if any) and restore defaults."""
    # `remoted_server` is a module-level global set by the test setup
    if remoted_server is not None:
        remoted_server.stop()
    # Set default values
    change_internal_options('agent.debug', '0')
    set_state_interval(5, internal_options)
    # Drop the dummy key added for the RemotedSimulator
    truncate_file(CLIENT_KEYS_PATH)
def restart_syscheck_function(get_configuration, request):
    """Reset ossec.log and start a new monitor."""
    control_service('stop', daemon='wazuh-syscheckd')
    truncate_file(fim.LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(fim.LOG_FILE_PATH))
    control_service('start', daemon='wazuh-syscheckd')
def test_skip_proc(get_configuration, configure_environment, restart_syscheckd, wait_for_initial_scan):
    """Check if syscheckd skips /proc when setting 'skip_proc="yes"'."""
    check_apply_test({'skip_proc'}, get_configuration['tags'])
    # Events are only expected when skipping is disabled in the test metadata
    trigger = get_configuration['metadata']['skip'] == 'no'

    if trigger:
        # Spawn a helper process so there is a live /proc/<pid> entry to monitor
        proc = subprocess.Popen(["python3", f"{os.path.dirname(os.path.abspath(__file__))}/data/proc.py"])

        # Change configuration, monitoring the PID path in /proc
        # Monitor only /proc/PID to expect only these events. Otherwise, it will fail due to Timeouts since
        # integrity scans will take too long
        new_conf = change_conf(f'/proc/{proc.pid}')
        new_ossec_conf = []

        # Get new skip_proc configuration
        for conf in new_conf:
            if conf['metadata']['skip'] == 'no' and conf['tags'] == ['skip_proc']:
                new_ossec_conf = set_section_wazuh_conf(conf.get('sections'))
        restart_wazuh_with_new_conf(new_ossec_conf)
        truncate_file(LOG_FILE_PATH)
        proc_monitor = FileMonitor(LOG_FILE_PATH)
        detect_initial_scan(proc_monitor)

        # Do not expect any 'Sending event' during the initial window
        with pytest.raises(TimeoutError):
            proc_monitor.start(timeout=3, callback=callback_detect_event,
                               error_message='Did not receive expected "Sending FIM event: ..." event')

        # Advance the clock so scheduled-scan events are produced
        check_time_travel(time_travel=True, monitor=wazuh_log_monitor)

        # Keep consuming events until one for the monitored /proc/<pid>/ path shows up
        found_event = False
        while not found_event:
            event = proc_monitor.start(timeout=5, callback=callback_detect_event,
                                       error_message='Did not receive expected '
                                                     '"Sending FIM event: ..." event').result()
            if f'/proc/{proc.pid}/' in event['data'].get('path'):
                found_event = True

        # Kill the process
        subprocess.Popen(["kill", "-9", str(proc.pid)])
    else:
        # skip_proc enabled: no integrity state event may appear; the raise
        # inside the `with` only fires if an unexpected event arrives
        with pytest.raises(TimeoutError):
            event = wazuh_log_monitor.start(timeout=3, callback=callback_detect_integrity_state)
            raise AttributeError(f'Unexpected event {event}')
def restart_modulesd_catching_ossec_conf_error(request):
    """Restart wazuh-modulesd with a clean log, tolerating startup failures
    caused by a broken ossec.conf."""
    control_service('stop', daemon='wazuh-modulesd')
    truncate_file(LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(LOG_FILE_PATH))
    try:
        control_service('start', daemon='wazuh-modulesd')
    except (ValueError, CalledProcessError):
        # A configuration error is expected here; the test inspects the log
        pass
def restart_remoted(get_configuration, request):
    """Reset ossec.log, expose a fresh monitor and restart the remoted daemon."""
    control_service('stop', daemon=DAEMON_NAME)
    truncate_file(LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(LOG_FILE_PATH))
    try:
        control_service('start', daemon=DAEMON_NAME)
    except sb.CalledProcessError:
        # Some test cases use configurations that make the daemon fail to start
        pass
def restart_wazuh(get_configuration, request):
    """Restart the whole Wazuh service with a clean ossec.log and a new monitor."""
    control_service('stop')
    truncate_file(LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', FileMonitor(LOG_FILE_PATH))
    control_service('start')
def restart_syscheckd_each_time(request):
    """Restart ossec-syscheckd with a clean log, ensure the test directory
    exists and wait for the initial FIM scan to finish.

    Args:
        request: Pytest request object; the fresh monitor is attached to its module.
    """
    control_service('stop', daemon='ossec-syscheckd')
    truncate_file(LOG_FILE_PATH)
    file_monitor = FileMonitor(LOG_FILE_PATH)
    setattr(request.module, 'wazuh_log_monitor', file_monitor)
    # exist_ok avoids the check-then-create race of the original
    # `if not os.path.exists(...): os.mkdir(...)` pattern
    os.makedirs(testdir, exist_ok=True)
    control_service('start', daemon='ossec-syscheckd')
    detect_initial_scan(file_monitor)
def restart_wazuh_alerts(get_configuration, request):
    """Restart Wazuh with a clean alerts.json and a new alert monitor."""
    control_service('stop')
    truncate_file(ALERT_FILE_PATH)
    setattr(request.module, 'wazuh_alert_monitor', FileMonitor(ALERT_FILE_PATH))
    control_service('start')
def test_configuration_age_datetime(new_datetime, get_files_list, get_configuration,
                                    create_file_structure_function, configure_environment):
    """Check if logcollector age option works correctly when the system datetime changes.

    Ensure that when the system date changes logcollector applies the age value
    properly, ignoring files that have not been modified for a time greater
    than the age value using the current date.

    Raises:
        TimeoutError: If the expected callbacks are not generated.
    """
    cfg = get_configuration['metadata']
    age_seconds = time_to_seconds(cfg['age'])

    control_service('stop', daemon=DAEMON_NAME)
    truncate_file(LOG_FILE_PATH)
    wazuh_log_monitor = FileMonitor(LOG_FILE_PATH)
    control_service('start', daemon=DAEMON_NAME)

    TimeMachine.travel_to_future(time_to_timedelta(new_datetime))

    # `entry` instead of `file`, which shadowed the builtin in the original
    for entry in file_structure:
        for name in entry['filename']:
            absolute_file_path = os.path.join(entry['folder_path'], name)

            log_callback = logcollector.callback_match_pattern_file(cfg['location'], absolute_file_path)
            wazuh_log_monitor.start(timeout=5, callback=log_callback,
                                    error_message=f"{name} was not detected")

            fileinfo = os.stat(absolute_file_path)
            current_time = time.time()
            mfile_time = current_time - fileinfo.st_mtime

            if age_seconds <= int(mfile_time):
                # File older than `age`: it must be reported as ignored
                log_callback = logcollector.callback_ignoring_file(absolute_file_path)
                wazuh_log_monitor.start(timeout=5, callback=log_callback,
                                        error_message=f"{name} was not ignored")
            else:
                # Recent file: the 'ignoring' log must NOT appear
                with pytest.raises(TimeoutError):
                    log_callback = logcollector.callback_ignoring_file(absolute_file_path)
                    wazuh_log_monitor.start(timeout=5, callback=log_callback,
                                            error_message=f"{name} was not ignored")

    TimeMachine.time_rollback()
def create_archives_log_monitor():
    """Create a FileMonitor for the archives.log file.

    Returns:
        FileMonitor: object to monitor the archives.log.
    """
    # Reset archives.log before handing out a fresh monitor
    file.truncate_file(tools.ARCHIVES_LOG_FILE_PATH)
    return monitoring.FileMonitor(tools.ARCHIVES_LOG_FILE_PATH)
def configure_mitm_environment(request):
    """Configure environment for sockets and MITM.

    Stops the whole Wazuh service, truncates the requested logs, starts each
    configured daemon with its (optional) socket man-in-the-middle, attaches
    the monitors to the requesting module, yields to the test, and finally
    tears everything down and restarts the service.
    """
    monitored_sockets_params = getattr(request.module, 'monitored_sockets_params')
    log_monitor_paths = getattr(request.module, 'log_monitor_paths')

    # Stop wazuh-service and ensure all daemons are stopped
    control_service('stop')
    check_daemon_status(running=False)

    monitored_sockets = list()
    mitm_list = list()
    log_monitors = list()

    # Truncate logs and create FileMonitors
    for log in log_monitor_paths:
        truncate_file(log)
        log_monitors.append(FileMonitor(log))

    # Start selected daemons and monitored sockets MITM. Plain `if`
    # statements replace the original `and`-chained short-circuit
    # expressions that were evaluated only for their side effects.
    for daemon, mitm, daemon_first in monitored_sockets_params:
        # `daemon_first` decides whether the MITM starts before or after its daemon
        if not daemon_first and mitm is not None:
            mitm.start()
        control_service('start', daemon=daemon, debug_mode=True)
        check_daemon_status(running=True, daemon=daemon,
                            extra_sockets=[mitm.listener_socket_address]
                            if mitm is not None and mitm.family == 'AF_UNIX' else None)
        if daemon_first and mitm is not None:
            mitm.start()
        if mitm is not None:
            monitored_sockets.append(QueueMonitor(queue_item=mitm.queue))
            mitm_list.append(mitm)

    setattr(request.module, 'monitored_sockets', monitored_sockets)
    setattr(request.module, 'log_monitors', log_monitors)

    yield

    # Stop daemons and monitored sockets MITM
    for daemon, mitm, _ in monitored_sockets_params:
        if mitm is not None:
            mitm.shutdown()
        control_service('stop', daemon=daemon)
        check_daemon_status(running=False, daemon=daemon,
                            extra_sockets=[mitm.listener_socket_address]
                            if mitm is not None and mitm.family == 'AF_UNIX' else None)

    # Delete all db
    delete_dbs()

    control_service('start')
def configure_environment_standalone_daemons(request):
    """Configure a custom environment for testing with specific Wazuh daemons only.

    Stopping wazuh-service is needed. The requesting module provides the
    daemon list via `used_daemons` and may define optional
    `extra_configuration_before_yield` / `extra_configuration_after_yield` hooks.
    """

    def remove_logs():
        """Remove all Wazuh logs"""
        for root, dirs, files in os.walk(WAZUH_LOGS_PATH):
            for file in files:
                os.remove(os.path.join(root, file))

    # Stop wazuh-service and ensure all daemons are stopped
    control_service('stop')
    check_daemon_status(running=False)

    # Remove all remaining Wazuh sockets
    delete_sockets()

    # Start selected daemons in debug mode and ensure they are running
    for daemon in getattr(request.module, 'used_daemons'):
        control_service('start', daemon=daemon, debug_mode=True)
        check_daemon_status(running=True, daemon=daemon)

    # Clear all Wazuh logs
    truncate_file(LOG_FILE_PATH)

    # Call extra functions before yield
    if hasattr(request.module, 'extra_configuration_before_yield'):
        func = getattr(request.module, 'extra_configuration_before_yield')
        func()

    yield

    # Call extra functions after yield
    if hasattr(request.module, 'extra_configuration_after_yield'):
        func = getattr(request.module, 'extra_configuration_after_yield')
        func()

    # Stop selected daemons
    for daemon in getattr(request.module, 'used_daemons'):
        control_service('stop', daemon=daemon)

    # Remove all remaining Wazuh sockets
    delete_sockets()

    # Remove all Wazuh logs
    remove_logs()
def magic(*args, **kwargs):
    # Wrapper generator fixture: stops modulesd and wazuh-db, cleans the
    # vulnerability-detector tables for the mocked agent, runs the wrapped
    # `func` (closure from the enclosing decorator), truncates the log and
    # restarts both daemons before yielding the fixture parameter.
    control_service('stop', daemon='wazuh-modulesd')
    control_service('stop', daemon='wazuh-db')

    # Clean tables
    clean_vd_tables(agent=kwargs['mock_agent'])

    func(*args, **kwargs)

    # Truncate ossec.log
    file.truncate_file(LOG_FILE_PATH)

    control_service('start', daemon='wazuh-modulesd')
    control_service('start', daemon='wazuh-db')

    # Expects the caller to pass the pytest request object in kwargs
    yield kwargs['request'].param

    # Teardown: leave vulnerability/system-programs tables clean
    clean_vuln_and_sys_programs_tables(agent=kwargs['mock_agent'])
def remove_field_feed(request):
    """It allows to modify the Arch Linux feed by removing a certain field and
    loading the new feed configuration."""
    backup_data = read_json_file(custom_archlinux_json_feed_path)
    modified_data = deepcopy(backup_data)
    modified_data[0].pop(request.param, None)
    write_json_file(custom_archlinux_json_feed_path, modified_data)
    vd.clean_vuln_and_sys_programs_tables()
    control_service('restart', daemon='wazuh-modulesd')
    # Fix: this fixture edits the Arch Linux feed, but the original set the
    # mocked system to 'Windows10' — an apparent copy-paste from the MSU
    # remove_field_feed fixture. 'ARCH' matches the sibling Arch Linux
    # modify_feed fixture in this file.
    vd.set_system(system='ARCH')

    yield request.param

    # Teardown: restore the original feed and leave a clean state
    write_json_file(custom_archlinux_json_feed_path, backup_data)
    vd.clean_vuln_and_sys_programs_tables()
    truncate_file(LOG_FILE_PATH)