def test_regular_file_changes(sleep, tags_to_apply, get_configuration, configure_environment, restart_syscheckd,
                              wait_for_initial_scan):
    """Check if syscheckd detects regular file changes (add, modify, delete) with a
    very specific delay between every action.

    Parameters
    ----------
    sleep : float
        Delay in seconds between every action.
    """
    threshold = 1.5 if sys.platform == 'win32' else 1.25
    if sleep < threshold and get_configuration['metadata']['fim_mode'] == 'whodata':
        pytest.xfail('Xfailing due to whodata threshold.')
    check_apply_test(tags_to_apply, get_configuration['tags'])
    filename = 'regular'

    # Perform the three actions (add, modify, delete) with the requested delay in between
    create_file(REGULAR, path=testdir1, name=filename, content='')
    time.sleep(sleep)
    modify_file(path=testdir1, name=filename, new_content='Sample')
    time.sleep(sleep)
    delete_file(path=testdir1, name=filename)

    # Expect one FIM event per action; widen the timeout when the delay is large
    detected = wazuh_log_monitor.start(timeout=max(sleep * 3, global_parameters.default_timeout),
                                       callback=callback_detect_event, accum_results=3,
                                       error_message='Did not receive expected "Sending FIM event: ..." event').result()
    for detected_event in detected:
        validate_event(detected_event)
def test_audit_key(audit_key, path, get_configuration, configure_environment, restart_syscheckd):
    """Check <audit_key> functionality by adding an audit rule and checking if alerts with
    that key are triggered when a file is created.

    This test is intended to be used with valid configurations.

    Parameters
    ----------
    audit_key : str
        Name of the audit_key to monitor.
    path : str
        Path of the folder to be monitored.
    """
    check_apply_test({audit_key}, get_configuration['tags'])

    # Add the audit watch rule for the monitored folder
    add_rule_command = "auditctl -w " + path + " -p wa -k " + audit_key
    os.system(add_rule_command)

    # Restart Wazuh and wait for the initial scan to complete
    truncate_file(LOG_FILE_PATH)
    control_service('restart')
    wazuh_log_monitor = FileMonitor(LOG_FILE_PATH)
    detect_initial_scan(wazuh_log_monitor)

    # Trigger an event and look for the audit_key word in the resulting log
    create_file(REGULAR, path, "testfile")
    triggered_keys = wazuh_log_monitor.start(timeout=30, callback=callback_audit_key, accum_results=1).result()
    assert audit_key in triggered_keys

    # Remove the audit watch rule so the environment is left clean
    remove_rule_command = "auditctl -W " + path + " -p wa -k " + audit_key
    os.system(remove_rule_command)
def extra_configuration_before_yield():
    """Create /testdir1/file1 and /testdir2/file2 before executing the test."""
    # One empty regular file per monitored directory
    for directory, filename in ((testdir1, testfile1), (testdir2, testfile2)):
        create_file(REGULAR, directory, filename, content='')
def test_create_file_scheduled(folder, name, filetype, content, checkers, tags_to_apply, get_configuration,
                               configure_environment, restart_syscheckd, wait_for_initial_scan):
    """Check if a special or regular file creation is detected by syscheck using scheduled monitoring.

    Regular files must be monitored. Special files must not.

    :param folder: Name of the monitored folder
    :param name: Name of the file
    :param filetype: Type of the file
    :param content: Content of the file
    :param checkers: Checks that will be compared to the ones from the event

    * This test is intended to be used with valid configurations files. Each execution of this test will configure
      the environment properly, restart the service and wait for the initial scan.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])

    # Create the file under test
    create_file(filetype, folder, name, content=content)

    # Jump ahead in time so the scheduled scan runs again
    TimeMachine.travel_to_future(timedelta(hours=13))

    if filetype != REGULAR:
        # Special files must not generate any FIM event
        with pytest.raises(TimeoutError):
            wazuh_log_monitor.start(timeout=DEFAULT_TIMEOUT, callback=callback_detect_event)
    else:
        # Regular files must generate an event matching the configured checks
        detected = wazuh_log_monitor.start(timeout=DEFAULT_TIMEOUT, callback=callback_detect_event).result()
        validate_event(detected, checkers)
def test_duplicate_entries(get_configuration, configure_environment, restart_syscheckd, wait_for_initial_scan):
    """Check if syscheckd ignores duplicate entries.

    For instance:
        - The second entry should prevail over the first one.
            <directories realtime="yes">/home/user</directories> (IGNORED)
            <directories whodata="yes">/home/user</directories>
        OR
        - Just generate one event.
            <directories realtime="yes">/home/user,/home/user</directories>
    """
    logger.info('Applying the test configuration')
    check_apply_test({'ossec_conf_duplicate_simple'}, get_configuration['tags'])
    filename = 'hello'
    mode2 = get_configuration['metadata']['fim_mode2']
    scheduled = mode2 == 'scheduled'
    # Normalize the mode name used inside events
    if mode2 == "realtime":
        mode2 = "real-time"

    logger.info(f'Adding file {os.path.join(testdir1, filename)}, content: " "')
    create_file(REGULAR, testdir1, filename, content=' ')

    logger.info(f'Time travel: {scheduled}')
    check_time_travel(scheduled)

    logger.info('Checking the event...')
    first_event = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event,
                                          error_message=f'Did not receive expected event for file '
                                                        f'{os.path.join(testdir1, filename)}').result()

    # A second event would mean the duplicate entry was not ignored
    second_event = check_event(previous_mode=mode2, previous_event=first_event, file=filename)
    assert second_event is None, "Multiple events created"
def test_events_while_integrity_scan(tags_to_apply, get_configuration, configure_environment, restart_syscheckd):
    """Check that events are being generated while a synchronization is being performed simultaneously."""
    fim_mode = get_configuration['metadata']['fim_mode']
    folder = testdir1 if fim_mode == 'realtime' else testdir2

    if fim_mode == 'whodata':
        # Whodata start-up and the synchronization check run on different threads, so the two
        # messages may arrive in either order; require both to be seen exactly once.
        first_message = wazuh_log_monitor.start(timeout=10, callback=callback_integrity_or_whodata,
                                                error_message='Did not receive expected "File integrity monitoring '
                                                              'real-time Whodata engine started" or "Initializing '
                                                              'FIM Integrity Synchronization check"').result()
        second_message = wazuh_log_monitor.start(timeout=10, callback=callback_integrity_or_whodata,
                                                 error_message='Did not receive expected "File integrity monitoring '
                                                               'real-time Whodata engine started" or "Initializing FIM '
                                                               'Integrity Synchronization check"').result()
        assert first_message != second_message, "callback_integrity_or_whodata detected the same message twice"
    else:
        # Make sure the integrity scan has begun before generating events
        wazuh_log_monitor.start(timeout=15, callback=callback_integrity_synchronization_check,
                                error_message='Did not receive expected '
                                              '"Initializing FIM Integrity Synchronization check" event')

    # Create a file and assert syscheckd detects it while doing the integrity scan
    file_name = 'file'
    create_file(REGULAR, folder, file_name, content='')
    sending_event = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event,
                                            error_message='Did not receive expected '
                                                          '"Sending FIM event: ..." event').result()
    assert sending_event['data']['path'] == os.path.join(folder, file_name)
def create_and_check_diff(name, path, fim_mode):
    """Create a file and check if it is duplicated in the diff directory.

    Parameters
    ----------
    name : str
        Name of the file to be created.
    path : str
        Path where the file will be created.
    fim_mode : str
        FIM mode (scheduled, realtime, whodata).

    Returns
    -------
    str
        String with the duplicated file path (diff).
    """
    create_file(REGULAR, path, name, content='Sample content')
    wait_for_event(fim_mode)

    diff_file = os.path.join(WAZUH_PATH, 'queue', 'diff', 'local')
    if sys.platform == 'win32':
        # On Windows the drive letter is replaced by a fixed 'c' path component and the
        # first directory of the monitored path is extracted via regex.
        drive_match = re.match(r'^[a-zA-Z]:(\\){1,2}(\w+)(\\){0,2}$', path)
        diff_file = os.path.join(diff_file, 'c', drive_match.group(2), name)
    else:
        diff_file = os.path.join(diff_file, path.strip('/'), name)

    assert os.path.exists(diff_file), f'{diff_file} does not exist'
    return diff_file
def test_tag_ignore(directory, event_generated, get_configuration, configure_environment, put_env_variables,
                    restart_syscheckd, wait_for_fim_start):
    """Test environment variables are ignored."""
    # Create a text file inside the target directory
    filename = "test"
    create_file(REGULAR, directory, filename, content="")

    # Go ahead in time to let syscheck perform a new scan
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    check_time_travel(scheduled, monitor=wazuh_log_monitor)

    expected_path = os.path.join(directory, filename)
    if not event_generated:
        # Keep reading ignored-file logs until the one for our file shows up
        while wazuh_log_monitor.start(timeout=global_parameters.default_timeout,
                                      callback=callback_ignore).result() != expected_path:
            pass
    else:
        event = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event,
                                        error_message='Did not receive expected '
                                                      '"Sending FIM event: ..." event').result()
        assert event['data']['type'] == 'added', 'Event type not equal'
        assert event['data']['path'] == expected_path, 'Event path not equal'
def test_max_eps(get_configuration, configure_environment, restart_syscheckd, wait_for_initial_scan):
    """Check that max_eps is respected when a big quantity of syscheck events are generated.

    During the test, a big quantity of files are created and the max number of event occurrences per
    second is measured to ensure it never exceeds max_eps.
    """
    check_apply_test({'max_eps'}, get_configuration['tags'])

    max_eps = int(get_configuration['metadata']['max_eps'])
    mode = get_configuration['metadata']['fim_mode']

    # Create enough files to span several seconds of events.
    # (max_eps is already an int — the original redundant int() cast is removed.)
    for i in range(max_eps * 5):
        create_file(REGULAR, testdir1, f'test{i}_{mode}_{max_eps}', content='')
    check_time_travel(mode == "scheduled")

    n_results = max_eps * 4
    result = wazuh_log_monitor.start(timeout=(n_results / max_eps) * 6,
                                     accum_results=n_results,
                                     callback=callback_event_message,
                                     error_message=f'Received less results than expected ({n_results})').result()

    # Group events by timestamp: all events sharing a timestamp happened in the same second
    counter = Counter([date_time for date_time, _ in result])
    # Allow a 10% margin to absorb timestamp-boundary jitter
    error_margin = (max_eps * 0.1)
    for date_time, n_occurrences in counter.items():
        assert n_occurrences <= round(max_eps + error_margin), \
            f'Sent {n_occurrences} but a maximum of {max_eps} was set'
def test_disk_quota_disabled(tags_to_apply, filename, folder, size, get_configuration, configure_environment,
                             restart_syscheckd, wait_for_fim_start):
    """Check that the disk_quota option is disabled correctly.

    Creates a file that, when compressed, is larger than the configured disk_quota limit and checks
    that the message about reaching the limit does not appear in the log.

    Parameters
    ----------
    filename : str
        Name of the file to be created.
    folder : str
        Directory where the files are being created.
    size : int
        Size of each file in bytes.
    tags_to_apply : set
        Run test if matches with a configuration identifier, skip otherwise.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'

    # Write a file big enough to exceed the configured quota once compressed
    file_content = generate_string(size, '0')
    create_file(REGULAR, folder, filename, content=file_content)
    check_time_travel(scheduled)

    # With disk_quota disabled the limit-reached message must never appear
    with pytest.raises(TimeoutError):
        wazuh_log_monitor.start(timeout=global_parameters.default_timeout,
                                callback=callback_disk_quota_limit_reached)
def test_max_files_per_second(get_configuration, configure_environment, restart_syscheckd, wait_for_fim_start):
    """Check that FIM sleeps for one second when the option max_files_per_second is enabled.

    Args:
        get_configuration (fixture): Gets the current configuration of the test.
        configure_environment (fixture): Configure the environment for the execution of the test.
        restart_syscheckd (fixture): Restarts syscheck.
        wait_for_fim_start (fixture): Waits until the first FIM scan is completed.

    Raises:
        TimeoutError: If an expected event couldn't be captured.
    """
    # Create the files in an empty folder to check realtime and whodata.
    for i in range(n_files_to_create):
        fim.create_file(fim.REGULAR, test_directories[0], f'test_{i}', content='')

    extra_timeout = n_files_to_create / max_files_per_second
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    fim.check_time_travel(scheduled)

    try:
        wazuh_log_monitor.start(timeout=global_parameters.default_timeout + extra_timeout,
                                callback=fim.callback_detect_max_files_per_second)
    except TimeoutError:
        # When max_files_per_sec is 0 the option is disabled, FIM never sleeps and the
        # timeout is the expected outcome. Otherwise re-raise with a bare `raise` so the
        # original traceback is preserved (the original `raise e` obscured it).
        if get_configuration['metadata']['max_files_per_sec'] != 0:
            raise
def test_file_size_default(tags_to_apply, filename, folder, get_configuration, configure_environment,
                           restart_syscheckd, wait_for_fim_start):
    """Check that the file_size option with a default value for report_changes is working correctly.

    Create a file smaller than the default limit and check that the compressed file has been created. If
    the first part is successful, increase the size of the file and expect the message for file_size limit
    reached and no compressed file in the queue/diff/local folder.

    Parameters
    ----------
    filename : str
        Name of the file to be created.
    folder : str
        Directory where the files are being created.
    tags_to_apply : set
        Run test if matches with a configuration identifier, skip otherwise.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    size_limit = translate_size('50MB')
    diff_file_path = make_diff_file_path(folder=folder, filename=filename)

    # Create file with a smaller size than the configured value
    to_write = generate_string(int(size_limit / 10), '0')
    create_file(REGULAR, folder, filename, content=to_write)
    check_time_travel(scheduled)
    wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event,
                            error_message='Did not receive expected "Sending FIM event: ..." event.')

    # BUG FIX: the original called pytest.raises() with an exception *instance* (no context
    # manager, no callable), which never fails the test. pytest.fail reports it properly.
    if not os.path.exists(diff_file_path):
        pytest.fail(f"{diff_file_path} not found. It should exist before increasing the size.")

    # Increase the size of the file over the configured value
    to_write = generate_string(size_limit, '0')
    modify_file_content(folder, filename, new_content=to_write * 3)
    check_time_travel(scheduled)
    wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_file_size_limit_reached,
                            error_message='Did not receive expected '
                                          '"File ... is too big for configured maximum size to perform diff operation" event.')

    # Same pytest.raises misuse fixed here; also corrects the "incresing" typo in the message.
    if os.path.exists(diff_file_path):
        pytest.fail(f"{diff_file_path} found. It should not exist after increasing the size.")
def extra_configuration_before_yield():
    """Create the target directory, a regular file inside it and a symlink pointing at it."""
    # World-writable target folder so the agent can monitor it regardless of its user
    os.makedirs(testdir_target, exist_ok=True, mode=0o777)
    fim.create_file(fim.REGULAR, testdir_target, 'regular1')
    # Symlink inside the monitored folder pointing at the target directory
    fim.create_file(fim.SYMLINK, testdir, 'testdir_link', target=testdir_target)
def extra_configuration_before_yield():
    # Create the files and registry keys before restarting Wazuh to make sure the
    # integrity scan will not finish before testing.
    for target_dir in test_directories:
        for filename, registry_subkey in zip(file_list, subkey_list):
            create_file(REGULAR, target_dir, filename, content='Sample content')
            create_registry(registry_parser[key], os.path.join(key, 'SOFTWARE', registry_subkey), KEY_WOW64_64KEY)
def test_symbolic_delete_target(tags_to_apply, main_folder, aux_folder, get_configuration, configure_environment,
                                restart_syscheckd, wait_for_initial_scan):
    """Check if syscheck detects events properly when removing a target, having the symlink updated and
    then recreating the target.

    CHECK: Having a symbolic link pointing to a file/folder, remove that file/folder and check that the
    deleted event is detected. Once symlink_checker runs, create the same file. No events should be
    raised. Wait again for a symlink_checker run and modify the file. The modification event must be
    detected this time.

    :param main_folder: Directory that is being pointed at or contains the pointed file
    :param aux_folder: Directory that will be pointed at or will contain the future pointed file

    * This test is intended to be used with valid configurations files. Each execution of this test will
      configure the environment properly, restart the service and wait for the initial scan.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    whodata = get_configuration['metadata']['fim_mode'] == 'whodata'
    file1 = 'regular1'

    # If the symlink is pointing to a directory, we need to add files and expect their 'added' event
    # (only if the file is being created within the pointed directory). Then, delete the pointed
    # file or directory.
    if tags_to_apply == {'monitored_dir'}:
        create_file(REGULAR, main_folder, file1, content='')
        check_time_travel(scheduled)
        wazuh_log_monitor.start(timeout=3, callback=callback_detect_event)
        delete_f(main_folder)
    else:
        delete_f(main_folder, file1)
    check_time_travel(scheduled)
    delete = wazuh_log_monitor.start(timeout=3, callback=callback_detect_event).result()
    assert 'deleted' in delete['data']['type'] and file1 in delete['data']['path'], \
        f"'deleted' event not matching for {file1}"

    # If syscheck is monitoring with whodata, wait for audit to reload rules
    wait_for_audit(whodata, wazuh_log_monitor)
    wait_for_symlink_check(wazuh_log_monitor)

    # Restore the target and don't expect any event since symlink hasn't updated the link information
    create_file(REGULAR, main_folder, file1, content='')
    check_time_travel(scheduled)
    with pytest.raises(TimeoutError):
        wazuh_log_monitor.start(timeout=3, callback=callback_detect_event)

    # Wait for symlink_checker (and audit, under whodata) to pick the restored target up again
    wait_for_symlink_check(wazuh_log_monitor)
    wait_for_audit(whodata, wazuh_log_monitor)

    # Modify the files and expect events since symcheck has updated now
    modify_file_content(main_folder, file1, 'Sample modification')
    check_time_travel(scheduled)
    modify = wazuh_log_monitor.start(timeout=3, callback=callback_detect_event).result()
    assert 'modified' in modify['data']['type'] and file1 in modify['data']['path'], \
        f"'modified' event not matching for {file1}"
def extra_configuration_before_yield():
    """Generate files to fill database"""
    # One empty file first, then the rest with content so they occupy space in the DB
    create_file(REGULAR, testdir1, f'{base_file_name}{10}')
    for index in range(2, NUM_FILES_TO_CREATE):
        create_file(REGULAR, testdir1, f'{base_file_name}{index}', content='content')
def test_symbolic_change_target_inside_folder(tags_to_apply, previous_target, new_target, get_configuration,
                                              configure_environment, restart_syscheckd, wait_for_fim_start):
    """Check if syscheck stops detecting events from previous target when pointing to a new folder.

    Having a symbolic link pointing to a file/folder, change its target to another file/folder inside a
    monitored folder. After symlink_checker runs check that no events for the previous target file are
    detected while events for the new target are still being raised.

    Args:
        tags_to_apply (set): Run test if matches with a configuration identifier, skip otherwise.
        previous_target (str): Previous symlink target.
        new_target (str): New symlink target (path).
        get_configuration (fixture): Gets the current configuration of the test.
        configure_environment (fixture): Configure the environment for the execution of the test.
        restart_syscheckd (fixture): Restarts syscheck.
        wait_for_fim_start (fixture): Waits until the first FIM scan is completed.

    Raises:
        TimeoutError: If a expected event wasn't triggered.
        AttributeError: If a unexpected event was captured.
        ValueError: If the event's type and path are not the expected.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    whodata = get_configuration['metadata']['fim_mode'] == 'whodata'
    file1 = 'regular1'
    symlink = 'symlink' if tags_to_apply == {'monitored_file'} else 'symlink2'

    # Check create event if it's pointing to a directory
    if tags_to_apply == {'monitored_dir'}:
        fim.create_file(fim.REGULAR, previous_target, file1, content='')
        fim.check_time_travel(scheduled, monitor=wazuh_log_monitor)
        wazuh_log_monitor.start(timeout=3, callback=fim.callback_detect_event,
                                error_message='Did not receive expected "Sending FIM event: ..." event')

    # Change the target to another file and wait the symcheck to update the link information
    modify_symlink(new_target, os.path.join(testdir_link, symlink))
    wait_for_symlink_check(wazuh_log_monitor)
    fim.wait_for_audit(whodata, wazuh_log_monitor)

    # Modify the content of the previous target and don't expect events. Modify the new target
    # and expect an event
    fim.modify_file_content(previous_target, file1, new_content='Sample modification')
    fim.check_time_travel(scheduled, monitor=wazuh_log_monitor)
    with pytest.raises(TimeoutError):
        event = wazuh_log_monitor.start(timeout=3, callback=fim.callback_detect_event)
        logger.error(f'Unexpected event {event.result()}')
        raise AttributeError(f'Unexpected event {event.result()}')

    fim.modify_file_content(testdir2, file1, new_content='Sample modification')
    fim.check_time_travel(scheduled, monitor=wazuh_log_monitor)
    modify = wazuh_log_monitor.start(timeout=3, callback=fim.callback_detect_event,
                                     error_message='Did not receive expected '
                                                   '"Sending FIM event: ..." event').result()
    assert 'modified' in modify['data']['type'] and os.path.join(testdir2, file1) in modify['data']['path'], \
        f"'modified' event not matching for {testdir2} {file1}"
def test_events_while_integrity_scan(tags_to_apply, get_configuration, configure_environment, restart_syscheckd):
    """Check that events are being generated while a synchronization is being performed simultaneously."""
    check_apply_test(tags_to_apply, get_configuration['tags'])
    folder = testdir1 if get_configuration['metadata'][
        'fim_mode'] == 'realtime' else testdir2
    # Registry key handle used later to generate a registry-value event
    key_h = create_registry(registry_parser[key], subkey, KEY_WOW64_64KEY)

    # Wait for whodata to start and the synchronization check. Since they are different threads,
    # we cannot expect them to come in order every time
    if get_configuration['metadata']['fim_mode'] == 'whodata':
        value_1 = wazuh_log_monitor.start(
            timeout=global_parameters.default_timeout * 2,
            callback=callback_integrity_or_whodata,
            error_message='Did not receive expected "File integrity monitoring '
                          'real-time Whodata engine started" or "Initializing '
                          'FIM Integrity Synchronization check"').result()
        value_2 = wazuh_log_monitor.start(
            timeout=global_parameters.default_timeout * 2,
            callback=callback_integrity_or_whodata,
            error_message='Did not receive expected "File integrity monitoring '
                          'real-time Whodata engine started" or "Initializing FIM '
                          'Integrity Synchronization check"').result()
        # The two messages must be distinct: one whodata start-up, one sync check
        assert value_1 != value_2, "callback_integrity_or_whodata detected the same message twice"
    else:
        # Check the integrity scan has begun
        wazuh_log_monitor.start(
            timeout=global_parameters.default_timeout * 3,
            callback=callback_integrity_synchronization_check,
            error_message='Did not receive expected '
                          '"Initializing FIM Integrity Synchronization check" event')

    # Create a file and a registry value. Assert syscheckd detects it while doing the integrity scan
    file_name = 'file'
    create_file(REGULAR, folder, file_name, content='')
    modify_registry_value(key_h, "test_value", REG_SZ, 'added')
    sending_event = wazuh_log_monitor.start(
        timeout=global_parameters.default_timeout,
        callback=callback_detect_event,
        error_message='Did not receive expected '
                      '"Sending FIM event: ..." event').result()
    assert sending_event['data']['path'] == os.path.join(folder, file_name)

    # Jump ahead so the scheduled scan picks up the registry change as well
    TimeMachine.travel_to_future(timedelta(hours=13))
    sending_event = wazuh_log_monitor.start(
        timeout=global_parameters.default_timeout,
        callback=callback_detect_event,
        error_message='Did not receive expected '
                      '"Sending FIM event: ..." event').result()
    assert sending_event['data']['path'] == os.path.join(key, subkey)
    assert sending_event['data']['arch'] == '[x64]'
def extra_configuration_before_yield():
    # Pre-populate both monitored folders with 1000 uniquely-named files each
    # (microsecond timestamp in the name guarantees uniqueness).
    for _ in range(1000):
        for target_dir in (testdir1, testdir2):
            create_file(REGULAR, target_dir, f'test_{int(round(time() * 10**6))}', content='')
def test_symbolic_delete_symlink(tags_to_apply, main_folder, aux_folder, get_configuration, configure_environment,
                                 restart_syscheckd, wait_for_initial_scan):
    """Check if syscheck stops detecting events when deleting the monitored symlink.

    CHECK: Having a symbolic link pointing to a file/folder, remove that symbolic link file, wait for the
    symlink checker runs and modify the target file. No events should be detected. Restore the symbolic
    link and modify the target file again once symlink checker runs. Events should be detected now.

    Parameters
    ----------
    main_folder : str
        Directory that is being pointed at or contains the pointed file.
    aux_folder : str
        Directory that will be pointed at or will contain the future pointed file.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    file1 = 'regular1'

    # When pointing to a directory, first make sure a file creation inside it raises an event
    if tags_to_apply == {'monitored_dir'}:
        create_file(REGULAR, main_folder, file1, content='')
        check_time_travel(scheduled, monitor=wazuh_log_monitor)
        wazuh_log_monitor.start(
            timeout=3,
            callback=callback_detect_event,
            error_message=
            'Did not receive expected "Sending FIM event: ..." event')

    # Remove symlink and don't expect events
    symlink = 'symlink' if tags_to_apply == {'monitored_file'} else 'symlink2'
    delete_f(testdir_link, symlink)
    wait_for_symlink_check(wazuh_log_monitor)
    modify_file_content(main_folder, file1, new_content='Sample modification')
    check_time_travel(scheduled, monitor=wazuh_log_monitor)
    with pytest.raises(TimeoutError):
        event = wazuh_log_monitor.start(timeout=3, callback=callback_detect_event)
        logger.error(f'Unexpected event {event.result()}')
        raise AttributeError(f'Unexpected event {event.result()}')

    # Restore symlink and modify the target again. Expect events now
    create_file(SYMLINK, testdir_link, symlink, target=os.path.join(main_folder, file1))
    wait_for_symlink_check(wazuh_log_monitor)
    modify_file_content(main_folder, file1, new_content='Sample modification 2')
    check_time_travel(scheduled, monitor=wazuh_log_monitor)
    modify = wazuh_log_monitor.start(timeout=3, callback=callback_detect_event).result()
    assert 'modified' in modify['data']['type'] and file1 in modify['data']['path'], \
        f"'modified' event not matching for {file1}"
def check_restrict(directory, trigger, check_list, file_list, timeout, scheduled):
    """Standard restrict attribute test."""
    create_file(REGULAR, directory, file_list[0], content='')
    if scheduled:
        # Jump ahead so the scheduled scan runs again
        TimeMachine.travel_to_future(timedelta(hours=13))

    # Consume 'restricted/ignored' log entries until the one for our file appears
    expected_path = os.path.join(directory, file_list[0])
    while wazuh_log_monitor.start(timeout=timeout, callback=callback_restricted).result() != expected_path:
        pass
def extra_configuration_before_yield():
    """Set up the symlink to one folder."""
    # Symlink pointing to testdir1
    fim.create_file(fim.SYMLINK, symlink_root_path, symlink_name, target=testdir1)
    # Shorten symlink_scan_interval so the test doesn't wait the default check period
    fim.change_internal_options(param='syscheck.symlink_scan_interval', value=link_interval)
def test_delete_folder(folder, file_list, filetype, tags_to_apply, get_configuration, configure_environment,
                       restart_syscheckd, wait_for_fim_start):
    """Check if syscheckd detects 'deleted' events from the files contained in a folder that is being deleted.

    If we are monitoring /testdir and we have r1, r2, r3 within /testdir, if we delete /testdir, we must
    see 3 events of the type 'deleted'. One for each one of the regular files.

    Parameters
    ----------
    folder : str
        Directory where the files will be created.
    file_list : list
        Names of the files.
    filetype : str
        Type of the files that will be created.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'
    mode = get_configuration['metadata']['fim_mode']

    # Create files inside subdir folder and validate their 'added' events
    for file in file_list:
        create_file(filetype, folder, file, content='')
    check_time_travel(scheduled, monitor=wazuh_log_monitor)
    events = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event,
                                     accum_results=len(file_list),
                                     error_message='Did not receive expected '
                                                   '"Sending FIM event: ..." event').result()
    for ev in events:
        validate_event(ev, mode=mode)

    # Remove the whole folder and expect one 'deleted' event per file
    shutil.rmtree(folder, ignore_errors=True)
    check_time_travel(scheduled, monitor=wazuh_log_monitor)
    event_list = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event,
                                         error_message='Did not receive expected '
                                                       '"Sending FIM event: ..." event',
                                         accum_results=len(file_list)).result()
    path_list = set([event['data']['path'] for event in event_list])
    counter_type = Counter([event['data']['type'] for event in event_list])

    # BUG FIX: the original iterated `events` (the creation events, already validated above)
    # here, so the deletion events were never validated. Validate `event_list` instead.
    for ev in event_list:
        validate_event(ev, mode=mode)

    assert counter_type['deleted'] == len(file_list), f'Number of "deleted" events should be {len(file_list)}'
    for file in file_list:
        assert os.path.join(folder, file) in path_list, f'File {file} not found within the events'
def test_audit_rules_removed_after_change_link(replaced_target, new_target, file_name, tags_to_apply,
                                               get_configuration, configure_environment, restart_syscheckd,
                                               wait_for_fim_start):
    """Test that checks if the audit rules are removed when the symlink's target is changed.

    Args:
        replaced_target (str): Directory where the link is pointing.
        new_target (str): Directory where the link will be pointed after it's updated.
        file_name (str): Name of the file that will be created inside the folders.
        tags_to_apply (set): Run test if matches with a configuration identifier, skip otherwise.
        get_configuration (fixture): Gets the current configuration of the test.
        configure_environment (fixture): Configure the environment for the execution of the test.
        restart_syscheckd (fixture): Restarts syscheck.
        wait_for_fim_start (fixture): Waits until the first FIM scan is completed.

    Raises:
        TimeoutError: If an expected event couldn't be captured.
        ValueError: If the event type isn't added or if the audit rule for ``replaced_target`` isn't removed.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])

    # Creating a file in the original target must raise an 'added' event
    fim.create_file(fim.REGULAR, replaced_target, file_name)
    ev = wazuh_log_monitor.start(
        timeout=global_parameters.default_timeout,
        callback=fim.callback_detect_event,
        error_message='Did not receive expected "Sending FIM event: ..." event'
    ).result()
    assert ev['data']['type'] == 'added' and ev['data'][
        'path'] == os.path.join(replaced_target, file_name)

    # Change the target of the symlink and expect events while there's no syscheck scan
    modify_symlink(new_target, symlink_path)
    wait_for_symlink_check(wazuh_log_monitor)
    fim.wait_for_audit(True, wazuh_log_monitor)
    # Snapshot the currently loaded audit rules right after the link update
    rules_paths = str(subprocess.check_output(['auditctl', '-l']))

    # The new target must now raise the 'added' event
    fim.create_file(fim.REGULAR, new_target, file_name)
    ev = wazuh_log_monitor.start(
        timeout=global_parameters.default_timeout,
        callback=fim.callback_detect_event,
        error_message='Did not receive expected "Sending FIM event: ..." event'
    ).result()
    assert ev['data']['type'] == 'added' and ev['data'][
        'path'] == os.path.join(new_target, file_name)

    # The audit rule for the old target must have been removed
    assert replaced_target not in rules_paths, f'The audit rule has been reloaded for {replaced_target}'
def test_cache(tags_to_apply, get_configuration, configure_api_environment, restart_api,
               wait_for_start, get_api_details):
    """Verify that the stored response is returned when cache is enabled.

    Calls to rules endpoints can be cached. With cache on, two consecutive requests
    inside the cache window must return the same count even though a new rules file
    was created between them, and the count must only grow after the cache (10 s)
    expires. With cache off, the second request must already reflect the new file.

    Parameters
    ----------
    tags_to_apply : set
        Run test if match with a configuration identifier, skip otherwise.
    """
    check_apply_test(tags_to_apply, get_configuration['tags'])
    cache = get_configuration['configuration']['cache']['enabled']
    api_details = get_api_details()
    api_details['base_url'] += '/rules/files'

    def _total_affected_items():
        # One GET against the rules-files endpoint, returning only the item count.
        return requests.get(api_details['base_url'], headers=api_details['auth_headers'],
                            verify=False).json()['data']['total_affected_items']

    # Count rules files before and after dropping a new file into /var/ossec/ruleset/rules.
    first_response = _total_affected_items()
    create_file(REGULAR, rules_directory, test_file)
    second_response = _total_affected_items()

    if cache:
        # Cached: both counts must match despite the new file.
        assert first_response == second_response, 'Total_affected_items should be equal in first and second response ' \
                                                  'when cache is enabled.'

        # Wait until cache expires (10 seconds) and verify that new response is updated.
        time.sleep(11)
        third_response = _total_affected_items()
        assert first_response + 1 == third_response, 'Cache should have expired but the response is still outdated.'
    else:
        # Uncached: the second response must already include the new file.
        assert first_response + 1 == second_response, 'Total_affected_items should be smaller in first response if ' \
                                                      'cache is disabled.'
def test_check_all_no(path, checkers, get_configuration, configure_environment, restart_syscheckd, wait_for_initial_scan): """ Test the functionality of `check_all` option when set to no. When setting `check_all` to no, only 'type' and 'checksum' attributes should appear in every event. This will avoid any modification event. Parameters ---------- path : str Directory where the file is being created and monitored. checkers : dict Check options to be used. """ check_apply_test({'test_check_all_no'}, get_configuration['tags']) scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled' # Create regular file and dont expect any check file = 'regular' create_file(REGULAR, path, file) check_time_travel(scheduled, monitor=wazuh_log_monitor) create_event = wazuh_log_monitor.start( callback=callback_detect_event, timeout=15, error_message='Did not receive expected ' '"Sending FIM event: ..." event').result() assert create_event['data']['type'] == 'added' assert list( create_event['data']['attributes'].keys()) == ['type', 'checksum'] # Delete regular file and dont expect any check. Since it is not using any check, modification events will not # be triggered modify_file(path, file, 'Sample modification') with pytest.raises(TimeoutError): event = wazuh_log_monitor.start(callback=callback_detect_event, timeout=5) raise AttributeError(f'Unexpected event {event}') delete_file(path, file) check_time_travel(scheduled, monitor=wazuh_log_monitor) delete_event = wazuh_log_monitor.start( callback=callback_detect_event, timeout=15, error_message='Did not receive expected ' '"Sending FIM event: ..." event').result() assert delete_event['data'][ 'type'] == 'deleted', f'Current value is {delete_event["data"]["type"]}' assert list(delete_event['data']['attributes'].keys()) == ['type', 'checksum'], \ f'Current value is {list(delete_event["data"]["attributes"].keys())}'
def check_restrict(directory, trigger, check_list, file_list, timeout, scheduled): """Standard restrict attribute test""" create_file(REGULAR, directory, file_list[0], content='') check_time_travel(scheduled, monitor=wazuh_log_monitor) while True: ignored_file = wazuh_log_monitor.start( timeout=timeout, callback=callback_restricted, error_message=f'TimeoutError was raised because a single ' f'"ignoring file {file_list[0]} due to restriction ..." ' f'was expected for {file_list[0]} but was not detected.').result() if ignored_file == os.path.join(directory, file_list[0]): break
def create_files(files, folder, content=b''): """Create all the files in the list Parameters ---------- files : list List of names to create files. folder : str Directory where the files are being created. content : basestring Content to write in each file. """ for file in files: create_file(REGULAR, folder, file, content=content)
def test_ignore_works_over_restrict(folder, filename, triggers_event, tags_to_apply, get_configuration,
                                    configure_environment, restart_syscheckd, wait_for_initial_scan):
    """Check if the ignore tag prevails over the restrict one when using both in the same directory.

    This test is intended to be used with valid configurations files. Each execution of this test
    will configure the environment properly, restart the service and wait for the initial scan.

    Parameters
    ----------
    folder : str
        Directory where the file is being created
    filename : str
        Name of the file to be created
    triggers_event : bool
        True if an event must be generated, False otherwise
    tags_to_apply : set
        Run test if it matches with a configuration identifier, skip otherwise
    """
    logger.info('Applying the test configuration')
    check_apply_test(tags_to_apply, get_configuration['tags'])
    scheduled = get_configuration['metadata']['fim_mode'] == 'scheduled'

    # Create file that must be ignored
    # BUGFIX: messages previously interpolated the module-level `testdir1` instead of the
    # `folder` parameter, reporting the wrong path whenever folder != testdir1.
    logger.info(f'Adding file {os.path.join(folder, filename)}, content: ""')
    create_file(REGULAR, folder, filename, content='')

    # Go ahead in time to let syscheck perform a new scan if mode is scheduled
    logger.info(f'Time travel: {scheduled}')
    check_time_travel(scheduled, monitor=wazuh_log_monitor)

    if triggers_event:
        logger.info('Checking the event...')
        event = wazuh_log_monitor.start(timeout=global_parameters.default_timeout,
                                        callback=callback_detect_event,
                                        error_message=f'Did not receive expected "Sending FIM event" '
                                                      f'event for file {os.path.join(folder, filename)}').result()

        assert event['data']['type'] == 'added', 'Event type not equal'
        assert event['data']['path'] == os.path.join(folder, filename), 'Event path not equal'
    else:
        # Ignored files may appear in any order; loop until the one for this file is seen.
        while True:
            ignored_file = wazuh_log_monitor.start(timeout=global_parameters.default_timeout,
                                                   callback=callback_ignore,
                                                   error_message=f'Did not receive expected '
                                                                 f'"Ignoring ... due to ..." event for file '
                                                                 f'{os.path.join(folder, filename)}').result()

            if ignored_file == os.path.join(folder, filename):
                break
def test_create_file_scheduled(folder, name, filetype, content, checkers, tags_to_apply, encoding, get_configuration, configure_environment, restart_syscheckd, wait_for_initial_scan): """ Check if a special or regular file creation is detected by syscheck using scheduled monitoring Regular files must be monitored. Special files must not. Parameters ---------- folder : str Name of the monitored folder. name : str Name of the file. filetype : str Type of the file. content : str Content of the file. checkers : set Checks that will compared to the ones from the event. """ check_apply_test(tags_to_apply, get_configuration['tags']) # Create files if encoding is not None: name = name.encode(encoding) folder = folder.encode(encoding) create_file(filetype, folder, name, content=content) # Go ahead in time to let syscheck perform a new scan check_time_travel(True, monitor=wazuh_log_monitor) if filetype == REGULAR: # Wait until event is detected event = wazuh_log_monitor.start( timeout=global_parameters.default_timeout, callback=callback_detect_event, encoding=encoding, error_message= 'Did not receive expected "Sending FIM event: ..." event').result( ) validate_event(event, checkers) else: with pytest.raises(TimeoutError): event = wazuh_log_monitor.start( timeout=global_parameters.default_timeout, callback=callback_detect_event) raise AttributeError(f'Unexpected event {event}')