def get_agents_sync_group(agent_list=None):
    """Get agents configuration sync status.

    :param agent_list: List of agent IDs.
    :return: AffectedItemsWazuhResult.
    """
    result = AffectedItemsWazuhResult(
        all_msg='Sync info was returned for all selected agents',
        some_msg='Sync info was not returned for some selected agents',
        none_msg='No sync info was returned',
    )

    system_agents = get_agents_info()
    for agent_id in agent_list:
        try:
            if agent_id == "000":
                raise WazuhError(1703)
            if agent_id not in system_agents:
                raise WazuhResourceNotFound(1701)
            else:
                # Check that the agent exists and is active.
                agent_info = Agent(agent_id).get_basic_information()
                # Check whether the agent belongs to a multigroup.
                if len(agent_info['group']) > 1:
                    multi_group = ','.join(agent_info['group'])
                    multi_group = hashlib.sha256(multi_group.encode()).hexdigest()[:8]
                    group_merged_path = path.join(common.multi_groups_path, multi_group, "merged.mg")
                else:
                    group_merged_path = path.join(common.shared_path, agent_info['group'][0], "merged.mg")
                result.affected_items.append({
                    'id': agent_id,
                    'synced': md5(group_merged_path) == agent_info['mergedSum']
                })
        except (IOError, KeyError):
            # The merged.mg file could not be opened, so the group has not been synced.
            result.affected_items.append({'id': agent_id, 'synced': False})
        except WazuhException as e:
            result.add_failed_item(id_=agent_id, error=e)
    result.total_affected_items = len(result.affected_items)

    return result
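# A minimal standalone sketch of the comparison performed by get_agents_sync_group,
# assuming only the standard library: hash a group's merged.mg file and compare it
# against the checksum reported by an agent. '_example_file_md5',
# '_example_is_group_synced', 'group_merged_path' and 'reported_merged_sum' are
# illustrative names, not part of the Wazuh API.
import hashlib


def _example_file_md5(file_path, block_size=65536):
    """Return the hexadecimal MD5 digest of a file, reading it in chunks."""
    digest = hashlib.md5()
    with open(file_path, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            digest.update(block)
    return digest.hexdigest()


def _example_is_group_synced(group_merged_path, reported_merged_sum):
    """Return True when the agent's reported checksum matches the local merged.mg file."""
    try:
        return _example_file_md5(group_merged_path) == reported_merged_sum
    except OSError:
        # As in get_agents_sync_group, an unreadable merged.mg means the group
        # is treated as not synced.
        return False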
def walk_dir(dirname, recursive, files, excluded_files, excluded_extensions, get_cluster_item_key, get_md5=True,
             whoami='master'):
    """Iterate recursively inside a directory, save the path of each found file and obtain its metadata.

    Parameters
    ----------
    dirname : str
        Directory within which to look for files.
    recursive : bool
        Whether to recursively look for files inside found directories.
    files : list
        List of files to obtain information from.
    excluded_files : list
        List of files to ignore.
    excluded_extensions : list
        List of extensions to ignore.
    get_cluster_item_key : str
        Key inside cluster.json['files'] to which each file belongs. This is useful to know what actions to take
        after sending a file from one node to another, depending on the directory the file belongs to.
    get_md5 : bool
        Whether to calculate and save the MD5 hash of the found file.
    whoami : str
        TODO - To be deprecated.

    Returns
    -------
    walk_files : dict
        Paths (keys) and metadata (values) of the requested files found inside 'dirname'.
    """
    walk_files = {}

    # Get list of all files and directories inside 'dirname'.
    try:
        entries = listdir(common.ossec_path + dirname)
    except OSError as e:
        raise WazuhError(3015, str(e))

    for entry in entries:
        # If file is inside 'excluded_files' or file extension is inside 'excluded_extensions', skip over.
        if entry in excluded_files or reduce(add, map(lambda x: entry[-(len(x)):] == x, excluded_extensions)):
            continue

        try:
            # Relative path to listed file.
            full_path = path.join(dirname, entry)

            # If 'all' files have been requested or entry is in the specified files list.
            if entry in files or files == ["all"]:
                if not path.isdir(common.ossec_path + full_path):
                    file_mod_time = datetime.utcfromtimestamp(stat(common.ossec_path + full_path).st_mtime)

                    # TODO - To be deprecated
                    if whoami == 'worker' and file_mod_time < (datetime.utcnow() - timedelta(minutes=30)):
                        continue

                    # Create dict with metadata of 'full_path' file.
                    entry_metadata = {"mod_time": str(file_mod_time), 'cluster_item_key': get_cluster_item_key}
                    if '.merged' in entry:
                        entry_metadata['merged'] = True
                        entry_metadata['merge_type'] = 'agent-groups'
                        entry_metadata['merge_name'] = dirname + '/' + entry
                    else:
                        entry_metadata['merged'] = False

                    if get_md5:
                        entry_metadata['md5'] = md5(common.ossec_path + full_path)

                    # Use the relative file path as a key to save its metadata dictionary.
                    walk_files[full_path] = entry_metadata

            if recursive and path.isdir(common.ossec_path + full_path):
                walk_files.update(walk_dir(full_path, recursive, files, excluded_files, excluded_extensions,
                                           get_cluster_item_key, get_md5, whoami))

        except Exception as e:
            logger.error("Could not get checksum of file {}: {}".format(entry, e))

    return walk_files
def walk_dir(dirname, recursive, files, excluded_files, excluded_extensions, get_cluster_item_key, get_md5=True,
             whoami='master'):
    """Iterate inside a directory, save the path of each found file and obtain its metadata."""
    walk_files = {}

    # Get list of all files and directories inside 'dirname'.
    try:
        entries = listdir(common.ossec_path + dirname)
    except OSError as e:
        raise WazuhException(3015, str(e))

    for entry in entries:
        # Skip files listed in 'excluded_files' or whose extension is in 'excluded_extensions'.
        if entry in excluded_files or reduce(add, map(lambda x: entry[-(len(x)):] == x, excluded_extensions)):
            continue

        try:
            # Relative path to the listed file.
            full_path = path.join(dirname, entry)

            # If 'all' files have been requested or the entry is in the specified files list.
            if entry in files or files == ["all"]:
                if not path.isdir(common.ossec_path + full_path):
                    file_mod_time = datetime.utcfromtimestamp(stat(common.ossec_path + full_path).st_mtime)

                    # Workers skip files whose last modification is older than 30 minutes.
                    if whoami == 'worker' and file_mod_time < (datetime.utcnow() - timedelta(minutes=30)):
                        continue

                    # Create dict with metadata of 'full_path' file.
                    entry_metadata = {"mod_time": str(file_mod_time), 'cluster_item_key': get_cluster_item_key}
                    if '.merged' in entry:
                        entry_metadata['merged'] = True
                        entry_metadata['merge_type'] = 'agent-info' if 'agent-info' in entry else 'agent-groups'
                        entry_metadata['merge_name'] = dirname + '/' + entry
                    else:
                        entry_metadata['merged'] = False

                    if get_md5:
                        entry_metadata['md5'] = md5(common.ossec_path + full_path)

                    # Use the relative file path as a key to save its metadata dictionary.
                    walk_files[full_path] = entry_metadata

            if recursive and path.isdir(common.ossec_path + full_path):
                walk_files.update(walk_dir(full_path, recursive, files, excluded_files, excluded_extensions,
                                           get_cluster_item_key, get_md5, whoami))

        except Exception as e:
            logger.error("Could not get checksum of file {}: {}".format(entry, e))

    return walk_files
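# A minimal self-contained sketch of the listing pattern used by the two walk_dir
# variants above, assuming only the standard library: list a directory, skip excluded
# names and suffixes, and record the modification time of each remaining regular file.
# '_example_list_metadata' is an illustrative name, not part of the Wazuh implementation.
from datetime import datetime
from os import listdir, path, stat


def _example_list_metadata(dirname, excluded_files=(), excluded_extensions=()):
    """Return a dict mapping file names in 'dirname' to their modification time."""
    collected = {}
    for entry in listdir(dirname):
        # Equivalent of the reduce/map suffix check used in the functions above.
        if entry in excluded_files or any(entry.endswith(ext) for ext in excluded_extensions):
            continue
        full_path = path.join(dirname, entry)
        if not path.isdir(full_path):
            collected[entry] = {'mod_time': str(datetime.utcfromtimestamp(stat(full_path).st_mtime))}
    return collected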
def walk_dir(dirname, recursive, files, excluded_files, excluded_extensions, get_cluster_item_key, get_md5=True):
    """Iterate recursively inside a directory, save the path of each found file and obtain its metadata.

    Parameters
    ----------
    dirname : str
        Directory within which to look for files.
    recursive : bool
        Whether to recursively look for files inside found directories.
    files : list
        List of files to obtain information from.
    excluded_files : list
        List of files to ignore.
    excluded_extensions : list
        List of extensions to ignore.
    get_cluster_item_key : str
        Key inside cluster.json['files'] to which each file belongs. This is useful to know what actions to take
        after sending a file from one node to another, depending on the directory the file belongs to.
    get_md5 : bool
        Whether to calculate and save the MD5 hash of the found file.

    Returns
    -------
    walk_files : dict
        Paths (keys) and metadata (values) of the requested files found inside 'dirname'.
    """
    walk_files = {}

    # Get the information collected in the previous integration process.
    previous_status = common.cluster_integrity_mtime.get()

    full_dirname = path.join(common.wazuh_path, dirname)
    # Get list of all files and directories inside 'full_dirname'.
    try:
        for root_, _, files_ in walk(full_dirname, topdown=True):
            # Check if recursive flag is set or root is actually the initial lookup directory.
            if recursive or root_ == full_dirname:
                for file_ in files_:
                    # If file is inside 'excluded_files' or file extension is inside 'excluded_extensions', skip over.
                    if file_ in excluded_files or any([file_.endswith(ext) for ext in excluded_extensions]):
                        continue

                    try:
                        # If 'all' files have been requested or entry is in the specified files list.
                        if files == ['all'] or file_ in files:
                            relative_file_path = path.join(path.relpath(root_, common.wazuh_path), file_)
                            abs_file_path = path.join(root_, file_)
                            file_mod_time = path.getmtime(abs_file_path)
                            try:
                                if file_mod_time == previous_status[relative_file_path]['mod_time']:
                                    # The current file has not changed its mtime since the last integrity process.
                                    walk_files[relative_file_path] = previous_status[relative_file_path]
                                    continue
                            except KeyError:
                                pass

                            # Create dict with metadata for the current file.
                            file_metadata = {"mod_time": file_mod_time, 'cluster_item_key': get_cluster_item_key}
                            if '.merged' in file_:
                                file_metadata['merged'] = True
                                file_metadata['merge_type'] = 'agent-groups'
                                file_metadata['merge_name'] = abs_file_path
                            else:
                                file_metadata['merged'] = False

                            if get_md5:
                                file_metadata['md5'] = md5(abs_file_path)

                            # Use the relative file path as a key to save its metadata dictionary.
                            walk_files[relative_file_path] = file_metadata
                    except FileNotFoundError as e:
                        logger.debug(f"File {file_} was deleted in previous iteration: {e}")
                    except PermissionError as e:
                        logger.error(f"Can't read metadata from file {file_}: {e}")
            else:
                break
    except OSError as e:
        raise WazuhInternalError(3015, e)

    return walk_files
def walk_dir(dirname, recursive, files, excluded_files, excluded_extensions, get_cluster_item_key, get_md5=True):
    """Iterate recursively inside a directory, save the path of each found file and obtain its metadata.

    Parameters
    ----------
    dirname : str
        Directory within which to look for files.
    recursive : bool
        Whether to recursively look for files inside found directories.
    files : list
        List of files to obtain information from.
    excluded_files : list
        List of files to ignore.
    excluded_extensions : list
        List of extensions to ignore.
    get_cluster_item_key : str
        Key inside cluster.json['files'] to which each file belongs. This is useful to know what actions to take
        after sending a file from one node to another, depending on the directory the file belongs to.
    get_md5 : bool
        Whether to calculate and save the MD5 hash of the found file.

    Returns
    -------
    walk_files : dict
        Paths (keys) and metadata (values) of the requested files found inside 'dirname'.
    """
    walk_files = {}

    # Get list of all files and directories inside 'dirname'.
    try:
        entries = listdir(os.path.join(common.wazuh_path, dirname))
    except OSError as e:
        raise WazuhError(3015, str(e))

    # Get the information collected in the previous integration process.
    previous_status = common.cluster_integrity_mtime.get()

    for entry in entries:
        # If file is inside 'excluded_files' or file extension is inside 'excluded_extensions', skip over.
        if entry in excluded_files or any([entry.endswith(v) for v in excluded_extensions]):
            continue

        try:
            # Relative path to listed file.
            full_path = path.join(dirname, entry)
            current_path = os.path.join(common.wazuh_path, full_path)

            # If 'all' files have been requested or entry is in the specified files list.
            if (entry in files or files == ["all"]) and not path.isdir(current_path):
                file_mod_time = os.path.getmtime(current_path)
                try:
                    if file_mod_time == previous_status[full_path]['mod_time']:
                        # The current file has not changed its mtime since the last integrity process.
                        walk_files[full_path] = previous_status[full_path]
                        continue
                except KeyError:
                    pass

                # Create dict with metadata of 'full_path' file.
                entry_metadata = {"mod_time": file_mod_time, 'cluster_item_key': get_cluster_item_key}
                if '.merged' in entry:
                    entry_metadata['merged'] = True
                    entry_metadata['merge_type'] = 'agent-groups'
                    entry_metadata['merge_name'] = os.path.join(dirname, entry)
                else:
                    entry_metadata['merged'] = False

                if get_md5:
                    entry_metadata['md5'] = md5(os.path.join(common.wazuh_path, full_path))

                # Use the relative file path as a key to save its metadata dictionary.
                walk_files[full_path] = entry_metadata

            if recursive and path.isdir(os.path.join(common.wazuh_path, full_path)):
                walk_files.update(walk_dir(full_path, recursive, files, excluded_files, excluded_extensions,
                                           get_cluster_item_key, get_md5))

        except Exception as e:
            logger.error(f"Could not get checksum of file {entry}: {e}")

    return walk_files
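# A minimal sketch of the optimization introduced in the two newer walk_dir variants:
# reuse the metadata computed in a previous pass and recompute the MD5 only for files
# whose modification time changed. 'previous_status' plays the role of
# common.cluster_integrity_mtime; '_example_walk_with_cache' and everything else here
# are illustrative and stdlib-only, not the Wazuh implementation.
import hashlib
from os import path, walk


def _example_walk_with_cache(dirname, previous_status, get_md5=True):
    """Return {relative_path: metadata}, reusing cached entries whose mtime is unchanged."""
    collected = {}
    for root, _, file_names in walk(dirname, topdown=True):
        for file_name in file_names:
            abs_path = path.join(root, file_name)
            rel_path = path.relpath(abs_path, dirname)
            mod_time = path.getmtime(abs_path)
            cached = previous_status.get(rel_path)
            if cached is not None and cached.get('mod_time') == mod_time:
                # Unchanged since the previous pass: reuse the cached metadata.
                collected[rel_path] = cached
                continue
            metadata = {'mod_time': mod_time}
            if get_md5:
                digest = hashlib.md5()
                with open(abs_path, 'rb') as f:
                    for block in iter(lambda: f.read(65536), b''):
                        digest.update(block)
                metadata['md5'] = digest.hexdigest()
            collected[rel_path] = metadata
    return collected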