def get_requirement(requirement=None, offset=0, limit=common.database_limit, sort_by=None, sort_ascending=True,
                    search_text=None, complementary_search=False, search_in_fields=None):
    """Return the values of a given requirement across all loaded rules.

    :param requirement: Requirement to get
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    result = AffectedItemsWazuhResult(none_msg='No rule was returned',
                                      all_msg='All selected rules were returned')

    # Reject unknown requirements up front instead of scanning the whole ruleset.
    if requirement not in RULE_REQUIREMENTS:
        result.add_failed_item(id_=requirement,
                               error=WazuhError(1205, extra_message=requirement,
                                                extra_remediation=f'Valid ones are {RULE_REQUIREMENTS}'))
        return result

    # Gather the requirement values of every rule, deduplicated via a set.
    unique_reqs = set()
    for rule in get_rules(limit=None).affected_items:
        unique_reqs.update(rule[requirement])

    data = process_array(list(unique_reqs), search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_user_me():
    """Get the information of the current user.

    Fixes two issues in the previous implementation:
    - The role/rule/policy managers were re-opened on every loop iteration;
      they are now opened once for the whole expansion.
    - The failed-item branch for a missing user was unreachable, because
      ``user['roles']`` was subscripted before the truthiness check and would
      raise first. The user is now checked before being expanded.

    Returns
    -------
    AffectedItemsWazuhResult with the desired information
    """
    result = AffectedItemsWazuhResult(all_msg='Current user information was returned')
    affected_items = list()
    with AuthenticationManager() as auth:
        user = auth.get_user(common.current_user.get())
        if user:
            # Open each manager once; expand every role with its rules and policies.
            with RolesManager() as rm, RulesManager() as rum, PoliciesManager() as pm:
                for index, role_id in enumerate(user['roles']):
                    role = rm.get_role_id(role_id=int(role_id))
                    role.pop('users')
                    for index_r, rule_id in enumerate(role['rules']):
                        role['rules'][index_r] = rum.get_rule(rule_id=int(rule_id))
                        role['rules'][index_r].pop('roles')
                    for index_p, policy_id in enumerate(role['policies']):
                        role['policies'][index_p] = pm.get_policy_id(policy_id=int(policy_id))
                        role['policies'][index_p].pop('roles')
                    user['roles'][index] = role
            affected_items.append(user)
        else:
            result.add_failed_item(id_=common.current_user.get(), error=WazuhError(5001))

    data = process_array(affected_items)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_groups(offset=0, limit=common.database_limit, sort_by=None, sort_ascending=True, search_text=None,
               complementary_search=False, search_in_fields=None):
    """Return every group referenced by the loaded rules.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    result = AffectedItemsWazuhResult(none_msg='No groups in rules were returned',
                                      some_msg='Some groups in rules were not returned',
                                      all_msg='All groups in rules were returned')

    # Collect the groups of every rule, deduplicated via a set.
    unique_groups = set()
    for rule in get_rules(limit=None).affected_items:
        unique_groups.update(rule['groups'])

    data = process_array(list(unique_groups), search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_policies(policy_ids, offset=0, limit=common.database_limit, sort_by=None, sort_ascending=True,
                 search_text=None, complementary_search=False, search_in_fields=None):
    """Return the information of the requested policies.

    :param policy_ids: ID of the policy on which the information will be collected (All for all policies)
    :param offset: First item to return
    :param limit: Maximum number of items to return
    :param sort_by: Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    result = AffectedItemsWazuhResult(none_msg='No policy was returned',
                                      some_msg='Some policies were not returned',
                                      all_msg='All specified policies were returned')
    affected_items = []
    with PoliciesManager() as pm:
        for p_id in policy_ids:
            int_id = int(p_id)
            policy = pm.get_policy_id(int_id)
            if policy == SecurityError.POLICY_NOT_EXIST:
                # The requested policy id does not exist.
                result.add_failed_item(id_=int_id, error=WazuhError(4007))
            else:
                affected_items.append(policy)

    data = process_array(affected_items, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_rules(rule_ids=None, offset=0, limit=common.database_limit, sort_by=None, sort_ascending=True,
              search_text=None, complementary_search=False, search_in_fields=None):
    """Return information from the requested security rules. It does not return information from its
    associated roles.

    :param rule_ids: List of rule ids. None or an empty list yields no rules.
        NOTE(review): the previous docstring claimed "None for all rules", but iterating None raised
        TypeError — confirm whether callers ever rely on a None default.
    :param offset: First item to return
    :param limit: Maximum number of items to return
    :param sort_by: Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    affected_items = list()
    result = AffectedItemsWazuhResult(none_msg='No security rule was returned',
                                      some_msg='Some security rules were not returned',
                                      all_msg='All specified security rules were returned')
    with RulesManager() as rum:
        # Guard against the documented None default: iterating None raised TypeError.
        for ru_id in (rule_ids or []):
            rule = rum.get_rule(int(ru_id))
            if rule != SecurityError.RULE_NOT_EXIST:
                affected_items.append(rule)
            else:
                # Rule id does not exist
                result.add_failed_item(id_=ru_id, error=WazuhError(4022))

    data = process_array(affected_items, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_users(user_ids: list = None, offset: int = 0, limit: int = common.database_limit, sort_by: dict = None,
              sort_ascending: bool = True, search_text: str = None, complementary_search: bool = False,
              search_in_fields: list = None):
    """Get the information of the specified users.

    Parameters
    ----------
    user_ids : list
        List of user ids. None or an empty list yields no users (previously a
        None default raised TypeError when iterated).
    offset : int
        First item to return
    limit : int
        Maximum number of items to return
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : bool
        Sort in ascending (true) or descending (false) order
    search_text : str
        Text to search
    complementary_search : bool
        Find items without the text to search
    search_in_fields : list
        Fields to search in

    Returns
    -------
    AffectedItemsWazuhResult with the desired information
    """
    result = AffectedItemsWazuhResult(none_msg='No user was returned',
                                      some_msg='Some users were not returned',
                                      all_msg='All specified users were returned')
    affected_items = list()
    with AuthenticationManager() as auth:
        # Guard against the None default: iterating None raised TypeError.
        for user_id in (user_ids or []):
            user_id = int(user_id)
            user = auth.get_user_id(user_id)
            if user:
                affected_items.append(user)
            else:
                # The requested user id does not exist.
                result.add_failed_item(id_=user_id, error=WazuhError(5001))

    data = process_array(affected_items, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_lists(path=None, offset=0, limit=common.database_limit, select=None, sort_by=None, sort_ascending=True,
              search_text=None, complementary_search=False, search_in_fields=None, relative_dirname=None,
              filename=None):
    """Get CDB lists.

    :param path: Relative paths of the list files to read. None or an empty list yields no lists.
        NOTE(review): the previous docstring claimed None returned all lists, but iterating None raised
        TypeError — confirm callers always supply the full path list.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param select: List of selected fields to return
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :param relative_dirname: Filters by relative dirname.
    :param filename: List of filenames to filter by.
    :return: AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(none_msg='No list was shown',
                                      some_msg='Some lists could not be shown',
                                      all_msg='All specified lists were shown')
    lists = list()
    # Guard against the None default: iterating None raised TypeError.
    for rel_p in (path or []):
        # Keep the file only if it matches both the dirname and filename filters (when given).
        if not any([relative_dirname is not None and os.path.dirname(rel_p) != relative_dirname,
                    filename is not None and os.path.split(rel_p)[1] not in filename]):
            lists.append({'items': get_list_from_file(rel_p),
                          'relative_dirname': os.path.dirname(rel_p),
                          'filename': os.path.split(rel_p)[1]})

    data = process_array(lists, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit, select=select,
                         allowed_sort_fields=SORT_FIELDS, required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_group_files(group_list=None, offset=0, limit=None, search_text=None, search_in_fields=None,
                    complementary_search=False, sort_by=None, sort_ascending=True, hash_algorithm='md5'):
    """Gets the group files.

    Lists every file in the group's shared directory plus the global 'ar.conf',
    each with a hash computed using `hash_algorithm`.

    :param group_list: List of Group names.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by.
    :param sort_ascending: Sort in ascending (true) or descending (false) order.
    :param search_text: Text to search.
    :param complementary_search: Find items without the text to search.
    :param search_in_fields: Fields to search in.
    :param hash_algorithm: hash algorithm used to get mergedsum and configsum.
    :return: WazuhResult.
    """
    # We access unique group_id from list, this may change if and when we decide to add option to get files for
    # a list of groups
    group_id = group_list[0]
    # Default to the shared root; narrowed to the group's own directory below when a group is given.
    group_path = common.shared_path
    result = AffectedItemsWazuhResult(all_msg='All selected groups files were returned',
                                      some_msg='Some groups files were not returned',
                                      none_msg='No groups files were returned'
                                      )
    if group_id:
        if not Agent.group_exists(group_id):
            result.add_failed_item(id_=group_id, error=WazuhResourceNotFound(1710))
            return result
        group_path = path.join(common.shared_path, group_id)

    if not path.exists(group_path):
        result.add_failed_item(id_=group_path, error=WazuhError(1006))
        return result

    try:
        data = []
        for entry in listdir(group_path):
            item = dict()
            item['filename'] = entry
            item['hash'] = get_hash(path.join(group_path, entry), hash_algorithm)
            data.append(item)

        # ar.conf — the active-response config lives at the shared root, outside any group directory.
        ar_path = path.join(common.shared_path, 'ar.conf')
        data.append({'filename': "ar.conf", 'hash': get_hash(ar_path, hash_algorithm)})
        data = process_array(data, search_text=search_text, search_in_fields=search_in_fields,
                             complementary_search=complementary_search, sort_by=sort_by,
                             sort_ascending=sort_ascending, offset=offset, limit=limit)
        result.affected_items = data['items']
        result.total_affected_items = data['totalItems']
    except WazuhError as e:
        # Record the failure against the group path, then propagate the original error.
        result.add_failed_item(id_=group_path, error=e)
        raise e
    except Exception as e:
        # Wrap unexpected failures (e.g. filesystem errors) as an internal error.
        raise WazuhInternalError(1727, extra_message=str(e))

    return result
def get_lists(filename=None, offset=0, limit=common.database_limit, select=None, sort_by=None, sort_ascending=True,
              search_text=None, complementary_search=False, search_in_fields=None, relative_dirname=None):
    """Get CDB lists content.

    Parameters
    ----------
    filename : list
        Filenames to filter by.
    offset : int
        First item to return.
    limit : int
        Maximum number of items to return.
    select : list
        List of selected fields to return.
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : boolean
        Sort in ascending (true) or descending (false) order.
    search_text : str
        Find items with the specified string.
    complementary_search : bool
        If True, only results NOT containing `search_text` will be returned. If False, only results that
        contains `search_text` will be returned.
    search_in_fields : str
        Name of the field to search in for the `search_text`.
    relative_dirname : str
        Filter by relative dirname.

    Returns
    -------
    result : AffectedItemsWazuhResult
        Lists content.
    """
    result = AffectedItemsWazuhResult(all_msg='All specified lists were returned',
                                      some_msg='Some lists were not returned',
                                      none_msg='No list was returned')
    dirname = join(common.ossec_path, relative_dirname) if relative_dirname else None

    lists = []
    for abs_path in get_filenames_paths(filename):
        # Skip entries that are not existing files.
        if not isfile(abs_path):
            continue
        # Skip files outside the requested directory (when one was given).
        if dirname is not None and path_dirname(abs_path) != dirname:
            continue
        rel_path = to_relative_path(abs_path)
        entries = [{'key': key, 'value': value} for key, value in get_list_from_file(abs_path).items()]
        lists.append({'items': entries,
                      'relative_dirname': path_dirname(rel_path),
                      'filename': split(rel_path)[1]})

    data = process_array(lists, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit, select=select,
                         allowed_sort_fields=SORT_FIELDS, required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_results_with_select(mitre_class: callable, filters: str, select: list, offset: int, limit: int,
                            sort_by: dict, sort_ascending: bool, search_text: str, complementary_search: bool,
                            search_in_fields: list, q: str) -> dict:
    """Sanitize the select parameter and processes the list of MITRE resources.

    Parameters
    ----------
    mitre_class : callable
        WazuhDBQueryMitre class used to obtain certain MITRE resources.
    filters : str
        Define field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}
    select : list
        Select which fields to return (separated by comma).
    offset : int
        First item to return.
    limit : int
        Maximum number of items to return.
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : bool
        Sort in ascending (true) or descending (false) order.
    search_text : str
        Text to search.
    complementary_search : bool
        Find items without the text to search.
    search_in_fields : list
        Fields to search in.
    q : str
        Query for filtering a list of results.

    Returns
    -------
    dict
        Processed MITRE resources: {'items': ..., 'totalItems': ...} as produced by `process_array`.
    """
    # The class' field metadata constrains which fields may be sorted on or selected.
    fields_info, data = get_mitre_items(mitre_class)

    return process_array(data['items'], filters=filters, search_text=search_text,
                         search_in_fields=search_in_fields, complementary_search=complementary_search,
                         sort_by=sort_by, select=select, sort_ascending=sort_ascending, offset=offset,
                         limit=limit, q=q, allowed_sort_fields=fields_info['allowed_fields'],
                         allowed_select_fields=fields_info['allowed_fields'],
                         required_fields=fields_info['min_select_fields'])
def get_path_lists(path=None, offset=0, limit=common.database_limit, sort_by=None, sort_ascending=True,
                   search_text=None, complementary_search=False, search_in_fields=None, relative_dirname=None,
                   filename=None):
    """Get paths of all CDB lists.

    :param path: List of paths to read lists from. None disables this filter (previously a None default
        raised TypeError in the membership test).
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :param relative_dirname: Filters by relative dirname.
    :param filename: List of filenames to filter by.
    :return: AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(none_msg='No path was shown',
                                      some_msg='Some paths could not be shown',
                                      all_msg='All specified paths were shown')
    lists = iterate_lists(only_names=True)
    # Iterate over a copy so entries can be removed from `lists` while filtering.
    for item in list(lists):
        if any([relative_dirname is not None and item['relative_dirname'] != relative_dirname,
                filename is not None and item['filename'] not in filename,
                # Only apply the path filter when `path` was supplied; `x not in None` raised TypeError.
                path is not None and os.path.join(item['relative_dirname'], item['filename']) not in path]):
            lists.remove(item)

    data = process_array(lists, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def ossec_log(level=None, tag=None, offset=0, limit=common.database_limit, sort_by=None,
              sort_ascending=True, search_text=None, complementary_search=False, search_in_fields=None, q=''):
    """Gets logs from ossec.log.

    :param level: Filters by log level: all, error or info.
    :param tag: Filters by log category/tag (i.e. wazuh-remoted).
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :param q: Defines query to filter.
    :return: AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(
        all_msg=f"Logs were successfully read"
                f"{' in specified node' if node_id != 'manager' else ''}",
        some_msg='Could not read logs in some nodes',
        none_msg=f"Could not read logs"
                 f"{' in specified node' if node_id != 'manager' else ''}")
    logs = get_ossec_logs()

    # Combine the level/tag filters and the user-supplied query into one query string.
    query_parts = []
    if level:
        query_parts.append(f'level={level}')
    if tag:
        query_parts.append(f'tag={tag}')
    if q:
        query_parts.append(q)
    query = ';'.join(query_parts)

    data = process_array(logs, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit, q=query)
    result.affected_items.extend(data['items'])
    result.total_affected_items = data['totalItems']

    return result
def get_path_lists(filename=None, offset=0, limit=common.database_limit, sort_by=None, sort_ascending=True,
                   search_text=None, complementary_search=False, search_in_fields=None, relative_dirname=None):
    """Get paths of all CDB lists.

    Parameters
    ----------
    filename : list
        List of filenames to filter by.
    offset : int
        First item to return.
    limit : int
        Maximum number of items to return.
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : boolean
        Sort in ascending (true) or descending (false) order.
    search_text : str
        Find items with the specified string.
    complementary_search : bool
        If True, only results NOT containing `search_text` will be returned. If False, only results that
        contains `search_text` will be returned.
    search_in_fields : str
        Name of the field to search in for the `search_text`.
    relative_dirname : str
        Filter by relative dirname.

    Returns
    -------
    result : AffectedItemsWazuhResult
        Paths of all CDB lists.
    """
    result = AffectedItemsWazuhResult(all_msg='All specified paths were returned',
                                      some_msg='Some paths were not returned',
                                      none_msg='No path was returned')
    requested_paths = get_filenames_paths(filename)
    lists = iterate_lists(only_names=True)
    # Iterate over a copy so entries can be dropped from `lists` while filtering.
    for entry in list(lists):
        wrong_dirname = relative_dirname is not None and entry['relative_dirname'] != relative_dirname
        not_requested = join(common.ossec_path, entry['relative_dirname'], entry['filename']) not in requested_paths
        if wrong_dirname or not_requested:
            lists.remove(entry)

    data = process_array(lists, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_decoders_files(status=None, relative_dirname=None, filename=None, offset=0, limit=common.database_limit,
                       sort_by=None, sort_ascending=True, search_text=None, complementary_search=False,
                       search_in_fields=None):
    """Gets a list of the available decoder files.

    :param status: Filters by status: enabled, disabled, all.
    :param relative_dirname: Filters by relative dirname.
    :param filename: List of filenames to filter by.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :return: AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(none_msg='No decoder files were returned',
                                      some_msg='Some decoder files were not returned',
                                      all_msg='All decoder files were returned')
    status = check_status(status)
    ruleset_conf = configuration.get_ossec_conf(section='ruleset')['ruleset']
    if not ruleset_conf:
        raise WazuhInternalError(1500)

    tags = ['decoder_include', 'decoder_exclude', 'decoder_dir']
    if isinstance(filename, list):
        # Resolve each requested filename independently and merge the results.
        decoders_files = []
        for single_name in filename:
            decoders_files.extend(
                format_rule_decoder_file(ruleset_conf,
                                         {'status': status, 'relative_dirname': relative_dirname,
                                          'filename': single_name},
                                         tags))
    else:
        decoders_files = format_rule_decoder_file(
            ruleset_conf, {'status': status, 'relative_dirname': relative_dirname, 'filename': filename}, tags)

    data = process_array(decoders_files, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_connected_nodes(self, filter_node: str = None, offset: int = 0, limit: int = common.database_limit,
                        sort: Dict = None, search: Dict = None, select: Dict = None,
                        filter_type: str = 'all') -> Dict:
    """
    Return all connected nodes, including the master node.

    :param filter_node: Node name (or list of names) to restrict the result to.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sort specification: {'fields': [...], 'order': 'asc'|'desc'}.
    :param search: Search specification: {'value': text, 'negation': bool}.
    :param select: Fields to return; must be a subset of the node info fields.
    :param filter_type: Node type filter: 'all', 'worker' or 'master'.
    :return: A dictionary containing data from each node
    """
    def return_node(node_info: Dict) -> bool:
        """
        Returns whether the node must be added to the result or not.

        :param node_info: Node information
        :return: A boolean
        """
        return (filter_node is None or node_info['name'] in filter_node) and (
                filter_type == 'all' or node_info['type'] == filter_type)

    # The master's own info dict defines the set of valid fields.
    default_fields = self.to_dict()['info'].keys()
    if select is None:
        select = default_fields
    else:
        # Reject selections that reference unknown fields.
        if not set(select).issubset(default_fields):
            raise exception.WazuhError(code=1724, extra_message=', '.join(set(select) - default_fields),
                                       extra_remediation=', '.join(default_fields))

    if filter_type != 'all' and filter_type not in {'worker', 'master'}:
        raise exception.WazuhError(1728)

    if filter_node is not None:
        # Normalize to a set and verify every requested node is either a connected client or this node.
        filter_node = set(filter_node) if isinstance(filter_node, list) else {filter_node}
        if not filter_node.issubset(set(itertools.chain(self.clients.keys(), [self.configuration['node_name']]))):
            raise exception.WazuhResourceNotFound(1730)

    # Collect info of this node plus every connected client that passes the filters.
    res = [val.to_dict()['info'] for val in itertools.chain([self], self.clients.values())
           if return_node(val.to_dict()['info'])]

    return utils.process_array([{k: v[k] for k in select} for v in res],
                               search_text=search['value'] if search is not None else None,
                               complementary_search=search['negation'] if search is not None else False,
                               sort_by=sort['fields'] if sort is not None else None,
                               sort_ascending=False if sort is not None and sort['order'] == 'desc' else True,
                               allowed_sort_fields=default_fields,
                               offset=offset, limit=limit)
def ossec_log(type_log='all', category='all', months=3, offset=0, limit=common.database_limit, sort_by=None,
              sort_ascending=True, search_text=None, complementary_search=False, search_in_fields=None, q=''):
    """Gets logs from ossec.log.

    :param type_log: Filters by log type: all, error or info.
    :param category: Filters by log category (i.e. ossec-remoted).
    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :param q: Defines query to filter.
    :return: AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(all_msg=f"Logs read successfully"
                                              f"{' in specified node' if node_id != 'manager' else ''}",
                                      some_msg='Could not read logs in some nodes',
                                      none_msg=f"Could not read logs"
                                               f"{' in specified node' if node_id != 'manager' else ''}"
                                      )
    logs = []

    first_date = previous_month(months)
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"

    # Only the last 2000 lines of ossec.log are inspected.
    for line in tail(common.ossec_log, 2000):
        log_fields = get_ossec_log_fields(line)
        if log_fields:
            log_date, log_category, level, description = log_fields

            # Skip entries older than the requested window.
            if log_date < first_date:
                continue

            if category != 'all':
                if log_category:
                    if log_category != category:
                        continue
                else:
                    continue
            # We transform local time (ossec.log) to UTC with ISO8601 maintaining time integrity
            log_line = {'timestamp': log_date.astimezone(timezone.utc),
                        'tag': log_category, 'level': level, 'description': description}
            if type_log == 'all':
                logs.append(log_line)
            elif type_log.lower() == level.lower():
                if "ERROR: statfs(" in line:
                    # NOTE(review): the statfs dedup appends the bare string into a list of dicts;
                    # later `logs[-1]['tag']` would raise on that string — confirm intended behavior.
                    if statfs_error in logs:
                        continue
                    else:
                        logs.append(statfs_error)
                else:
                    logs.append(log_line)
            else:
                continue
        else:
            # Continuation line: append it to the previous entry's description.
            # NOTE(review): `log_category`/`level` here are leftovers from the last parsed entry;
            # if the very first tailed line is a continuation they are undefined (NameError) — verify.
            if logs and line and log_category == logs[-1]['tag'] and level == logs[-1]['level']:
                logs[-1]['description'] += "\n" + line

    data = process_array(logs, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit, q=q)
    result.affected_items.extend(data['items'])
    result.total_affected_items = data['totalItems']

    return result
def get_policies(policy_ids, offset=0, limit=common.database_limit, sort_by=None, select=None,
                 sort_ascending=True, search_text=None, complementary_search=False, search_in_fields=None):
    """Return the information of a certain policy.

    Parameters
    ----------
    policy_ids : list
        ID of the policy on which the information will be collected (All for all policies)
    offset : int
        First item to return
    limit : int
        Maximum number of items to return
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : bool
        Sort in ascending (true) or descending (false) order
    search_text : str
        Text to search
    select : str
        Select which fields to return (separated by comma)
    complementary_search : bool
        Find items without the text to search
    search_in_fields : list
        Fields to search in

    Returns
    -------
    Policies information
    """
    result = AffectedItemsWazuhResult(none_msg='No policy was returned',
                                      some_msg='Some policies were not returned',
                                      all_msg='All specified policies were returned')
    affected_items = []
    with PoliciesManager() as pm:
        for p_id in policy_ids:
            int_id = int(p_id)
            policy = pm.get_policy_id(int_id)
            if policy == SecurityError.POLICY_NOT_EXIST:
                # The requested policy id does not exist.
                result.add_failed_item(id_=int_id, error=WazuhError(4007))
            else:
                affected_items.append(policy)

    data = process_array(affected_items, search_text=search_text, search_in_fields=search_in_fields,
                         select=select, complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit,
                         allowed_sort_fields=SORT_FIELDS, required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_rules(rule_ids=None, offset=0, limit=common.database_limit, sort_by=None, select=None,
              sort_ascending=True, search_text=None, complementary_search=False, search_in_fields=None):
    """Return information from all the security rules. It does not return information from its associated roles.

    Parameters
    ----------
    rule_ids : list
        List of rule ids. None or an empty list yields no rules.
        NOTE(review): the previous docstring claimed "None for all rules", but iterating None raised
        TypeError — confirm whether callers ever rely on a None default.
    offset : int
        First item to return
    limit : int, optional
        Maximum number of items to return
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : bool
        Sort in ascending (true) or descending (false) order
    search_text : str
        Text to search
    select : str
        Select which fields to return (separated by comma)
    complementary_search : bool
        Find items without the text to search
    search_in_fields : list
        Fields to search in

    Returns
    -------
    Rules information
    """
    affected_items = list()
    result = AffectedItemsWazuhResult(none_msg='No security rule was returned',
                                      some_msg='Some security rules were not returned',
                                      all_msg='All specified security rules were returned')
    with RulesManager() as rum:
        # Guard against the documented None default: iterating None raised TypeError.
        for ru_id in (rule_ids or []):
            rule = rum.get_rule(int(ru_id))
            if rule != SecurityError.RULE_NOT_EXIST:
                affected_items.append(rule)
            else:
                # Rule id does not exist
                result.add_failed_item(id_=ru_id, error=WazuhError(4022))

    data = process_array(affected_items, search_text=search_text, search_in_fields=search_in_fields,
                         select=select, complementary_search=complementary_search, sort_by=sort_by,
                         sort_ascending=sort_ascending, offset=offset, limit=limit,
                         allowed_sort_fields=SORT_FIELDS, required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_decoders(names=None, status=None, filename=None, relative_dirname=None, parents=False, offset=0,
                 limit=common.database_limit, select=None, sort_by=None, sort_ascending=True, search_text=None,
                 complementary_search=False, search_in_fields=None, q=''):
    """Get a list of available decoders, loading every decoder file and filtering in memory.

    :param names: Filters by decoder name.
    :param filename: List of filenames to filter by.
    :param status: Filters by status: enabled, disabled, all.
    :param relative_dirname: Filters by relative dirname.
    :param parents: Just parent decoders.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param select: List of selected fields to return
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :param q: Defines query to filter.
    :return: AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(none_msg='No decoder was returned',
                                      some_msg='Some decoders were not returned',
                                      all_msg='All selected decoders were returned')
    all_decoders = list()
    if names is None:
        names = list()

    # Load every decoder from every decoder file; filtering happens afterwards in memory.
    for decoder_file in get_decoders_files(limit=None).affected_items:
        all_decoders.extend(load_decoders_from_file(decoder_file['filename'], decoder_file['relative_dirname'],
                                                    decoder_file['status']))

    # Normalize status: 'all' expands to both valid states so the membership test below works uniformly.
    status = check_status(status)
    status = ['enabled', 'disabled'] if status == 'all' else [status]
    parameters = {'relative_dirname': relative_dirname, 'filename': filename, 'name': names, 'parents': parents,
                  'status': status}
    # `decoders` starts as a copy of everything and gets pruned; `d in decoders` guards
    # against removing the same decoder twice when several filters reject it.
    decoders = list(all_decoders)
    # Names not matched by any loaded decoder are reported as failed items at the end.
    no_existent_files = names[:]
    for d in all_decoders:
        for key, value in parameters.items():
            if value:
                if key == 'name':
                    if d[key] not in value and d in decoders:
                        decoders.remove(d)
                    elif d[key] in no_existent_files:
                        # The requested name exists after all: stop treating it as missing.
                        no_existent_files.remove(d[key])
                elif key == 'status' and d[key] not in value and d in decoders:
                    decoders.remove(d)
                elif key == 'filename' and d[key] not in filename and d in decoders:
                    decoders.remove(d)
                elif key == 'relative_dirname' and d[key] != relative_dirname and d in decoders:
                    decoders.remove(d)
                # NOTE(review): this final branch is reachable for ANY truthy parameter key (not just
                # 'parents'), since it is the fall-through of the elif chain; it only acts when
                # `parents` is truthy, so the net effect is dropping non-parent decoders — but it may
                # fire on a different key's iteration than expected. Confirm this is intentional.
                elif 'parent' in d['details'] and parents and d in decoders:
                    decoders.remove(d)

    for decoder_name in no_existent_files:
        result.add_failed_item(id_=decoder_name, error=WazuhError(1504))

    data = process_array(decoders, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by, sort_ascending=sort_ascending,
                         allowed_sort_fields=SORT_FIELDS, offset=offset, select=select, limit=limit, q=q,
                         required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_rules(rule_ids=None, status=None, group=None, pci_dss=None, gpg13=None, gdpr=None, hipaa=None,
              nist_800_53=None, tsc=None, mitre=None, relative_dirname=None, filename=None, level=None, offset=0,
              limit=common.database_limit, select=None, sort_by=None, sort_ascending=True, search_text=None,
              complementary_search=False, search_in_fields=None, q=''):
    """Get a list of rules, loading every rule file and filtering in memory.

    :param rule_ids: IDs of rules.
    :param status: Filters the rules by status.
    :param group: Filters the rules by group.
    :param pci_dss: Filters the rules by pci_dss requirement.
    :param gpg13: Filters the rules by gpg13 requirement.
    :param gdpr: Filters the rules by gdpr requirement.
    :param hipaa: Filters the rules by hipaa requirement.
    :param nist_800_53: Filters the rules by nist_800_53 requirement.
    :param tsc: Filters the rules by tsc requirement.
    :param mitre: Filters the rules by mitre attack ID.
    :param relative_dirname: Filters the relative dirname.
    :param filename: List of filenames to filter by.
    :param level: Filters the rules by level. level=2 or level=2-5.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param select: List of selected fields to return
    :param sort_by: Fields to sort the items by
    :param sort_ascending: Sort in ascending (true) or descending (false) order
    :param search_text: Text to search
    :param complementary_search: Find items without the text to search
    :param search_in_fields: Fields to search in
    :param q: Defines query to filter.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    result = AffectedItemsWazuhResult(none_msg='No rule was returned',
                                      some_msg='Some rules were not returned',
                                      all_msg='All selected rules were returned')
    rules = list()
    if rule_ids is None:
        rule_ids = list()
    levels = None
    if level:
        # 'N' -> ['N'] (exact level); 'N-M' -> ['N', 'M'] (inclusive range).
        levels = level.split('-')
        # NOTE(review): `len(levels) < 0` can never be true (str.split returns at least one
        # element), so only the `> 2` half of this validation is effective. Malformed input
        # such as '2-' still reaches int() below and raises ValueError instead of 1203 — confirm.
        if len(levels) < 0 or len(levels) > 2:
            raise WazuhError(1203)

    # Load every rule from every rule file; filtering happens afterwards in memory.
    for rule_file in get_rules_files(limit=None).affected_items:
        rules.extend(load_rules_from_file(rule_file['filename'], rule_file['relative_dirname'],
                                          rule_file['status']))

    # Normalize status: 'all' expands to both valid states so the membership test below works uniformly.
    status = check_status(status)
    status = ['enabled', 'disabled'] if status == 'all' else [status]
    parameters = {'groups': group, 'pci_dss': pci_dss, 'gpg13': gpg13, 'gdpr': gdpr, 'hipaa': hipaa,
                  'nist_800_53': nist_800_53, 'tsc': tsc, 'mitre': mitre, 'relative_dirname': relative_dirname,
                  'filename': filename, 'id': rule_ids, 'level': levels, 'status': status}

    # `rules` gets pruned while `original_rules` is iterated; requested ids never seen
    # in any file are reported as failed items at the end.
    original_rules = list(rules)
    no_existent_ids = rule_ids[:]
    for r in original_rules:
        if r['id'] in no_existent_ids:
            no_existent_ids.remove(r['id'])
        for key, value in parameters.items():
            if value:
                # One compound rejection test per filter ('and' binds tighter than 'or'):
                #   - 'level': exact match for a single value, inclusive range for two values;
                #   - 'id' / 'filename' / 'status': membership in the requested list;
                #   - any remaining scalar filter (group, pci_dss, ..., relative_dirname):
                #     containment of the requested value in the rule's field.
                if key == 'level' and (len(value) == 1 and int(value[0]) != r['level'] or
                                       len(value) == 2 and not int(value[0]) <= r['level'] <= int(value[1])) or \
                        (key == 'id' and r[key] not in value) or \
                        (key == 'filename' and r[key] not in filename) or \
                        (key == 'status' and r[key] not in value) or \
                        (not isinstance(value, list) and value not in r[key]):
                    rules.remove(r)
                    # A single failing filter is enough to discard the rule.
                    break

    for rule_id in no_existent_ids:
        result.add_failed_item(id_=rule_id, error=WazuhError(1208))

    data = process_array(rules, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, select=select, sort_by=sort_by,
                         sort_ascending=sort_ascending, allowed_sort_fields=SORT_FIELDS, offset=offset, limit=limit,
                         q=q, required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
def get_sca_checks(policy_id=None, agent_list=None, q="", offset=0, limit=common.database_limit, sort=None,
                   search=None, select=None, filters=None):
    """Get a list of checks analyzed for a policy.

    Parameters
    ----------
    policy_id : str
        Policy id to get the checks from.
    agent_list : list
        Agent id to get the policies from. ``None`` behaves like an empty list;
        previously the default value made ``len(agent_list)`` raise ``TypeError``.
    q : str
        Defines query to filter in DB.
    offset : int
        First item to return.
    limit : int
        Maximum number of items to return.
    sort : str
        Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    search : str
        Looks for items with the specified string. Format: {"fields": ["field1","field2"]}
    select : str
        Select fields to return. Format: {"fields":["field1","field2"]}.
    filters : str
        Define field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}

    Returns
    -------
    AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(all_msg='All selected sca/policy information was returned',
                                      some_msg='Some sca/policy information was not returned',
                                      none_msg='No sca/policy information was returned')
    # Bug fix: guard with truthiness instead of `len(agent_list) != 0` so the
    # default `agent_list=None` no longer raises TypeError.
    if agent_list:
        sca_checks = list()
        if agent_list[0] in get_agents_info():
            # One flat row per (check, compliance, rule) combination comes back from the DB;
            # the rows are regrouped per check id below.
            fields_translation = {**fields_translation_sca_check,
                                  **fields_translation_sca_check_compliance,
                                  **fields_translation_sca_check_rule}
            full_select = (list(fields_translation_sca_check.keys()) +
                           list(fields_translation_sca_check_compliance.keys()) +
                           list(fields_translation_sca_check_rule.keys()))

            # Workaround for too long sca_checks results until the chunk algorithm is implemented (1/2)
            db_query = WazuhDBQuerySCA(agent_id=agent_list[0], offset=0, limit=None, sort=None, filters=filters,
                                       search=None, select=full_select, count=True, get_data=True,
                                       query=f"policy_id={policy_id}", default_query=default_query_sca_check,
                                       default_sort_field='policy_id', fields=fields_translation, count_field='id')
            result_dict = db_query.run()

            if 'items' in result_dict:
                checks = result_dict['items']
            else:
                raise WazuhInternalError(2007)

            # NOTE(review): itertools.groupby only groups CONSECUTIVE equal keys, so this
            # assumes the DB rows arrive ordered by 'id' — confirm against WazuhDBQuerySCA.
            groups = groupby(checks, key=itemgetter('id'))
            select_fields = full_select if select is None else select
            # Keep only base check fields for the top-level dict; compliance/rules are re-attached below.
            select_fields = set([field if field != 'compliance' else 'compliance'
                                 for field in select_fields if field in fields_translation_sca_check])

            # Rearrange check and compliance fields
            for _, group in groups:
                group_list = list(group)
                # All rows of a group share the check fields, so the first row is representative.
                check_dict = {k: v for k, v in group_list[0].items() if k in select_fields}
                for extra_field, field_translations in [('compliance', fields_translation_sca_check_compliance),
                                                        ('rules', fields_translation_sca_check_rule)]:
                    if (select is None or extra_field in select) \
                            and set(field_translations.keys()) & group_list[0].keys():
                        # Deduplicate the (field, ...) tuples with set() before rebuilding dicts.
                        check_dict[extra_field] = [dict(zip(field_translations.values(), x))
                                                   for x in set((map(itemgetter(*field_translations.keys()),
                                                                     group_list)))]
                sca_checks.append(check_dict)
        else:
            result.add_failed_item(id_=agent_list[0], error=WazuhResourceNotFound(1701))
            result.total_affected_items = 0

        # Workaround for too long sca_checks results until the chunk algorithm is implemented (2/2)
        data = process_array(sca_checks,
                             search_text=search['value'] if search else None,
                             complementary_search=search['negation'] if search else False,
                             sort_by=sort['fields'] if sort else ['policy_id'],
                             sort_ascending=False if sort and sort['order'] == 'desc' else True,
                             offset=offset, limit=limit, q=q)
        result.affected_items = data['items']
        result.total_affected_items = data['totalItems']

    return result
def get_connected_nodes(self, filter_node: str = None, offset: int = 0, limit: int = common.database_limit,
                        sort: Dict = None, search: Dict = None, select: Dict = None,
                        filter_type: str = 'all') -> Dict:
    """Get all connected nodes, including the master node.

    Parameters
    ----------
    filter_node : str, list
        Node to return.
    offset : int
        First element to return.
    limit : int
        Maximum number of elements to return.
    sort : dict
        Sorts the collection by a field or fields.
    search : dict
        Looks for elements with the specified string.
    select : dict
        Select which fields to return (separated by comma).
    filter_type : str
        Type of node (worker/master).

    Returns
    -------
    dict
        Data from each node.
    """
    node_info_fields = self.to_dict()['info'].keys()

    # Validate `select` against the fields a node actually exposes.
    if select is None:
        select = node_info_fields
    elif not set(select).issubset(node_info_fields):
        raise exception.WazuhError(code=1724,
                                   extra_message=', '.join(set(select) - node_info_fields),
                                   extra_remediation=', '.join(node_info_fields))

    if filter_type not in {'all', 'worker', 'master'}:
        raise exception.WazuhError(1728)

    # Validate `filter_node` against the names of every known node (workers + this node).
    if filter_node is not None:
        filter_node = set(filter_node) if isinstance(filter_node, list) else {filter_node}
        known_names = set(itertools.chain(self.clients.keys(), [self.configuration['node_name']]))
        if not filter_node.issubset(known_names):
            raise exception.WazuhResourceNotFound(1730)

    def keep(info: Dict) -> bool:
        """Return whether a node's info passes the name and type filters."""
        name_matches = filter_node is None or info['name'] in filter_node
        type_matches = filter_type == 'all' or info['type'] == filter_type
        return name_matches and type_matches

    # Collect the matching nodes, projecting each one onto the selected fields.
    selected_nodes = []
    for node in itertools.chain([self], self.clients.values()):
        info = node.to_dict()['info']
        if keep(info):
            selected_nodes.append({field: info[field] for field in select})

    return utils.process_array(selected_nodes,
                               search_text=search['value'] if search is not None else None,
                               complementary_search=search['negation'] if search is not None else False,
                               sort_by=sort['fields'] if sort is not None else None,
                               sort_ascending=False if sort is not None and sort['order'] == 'desc' else True,
                               allowed_sort_fields=node_info_fields,
                               offset=offset,
                               limit=limit)