def _get_requirement(offset, limit, sort, search, requirement):
    """
    Get the requirements used in the rules.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param requirement: Requirement to get ('pci' or 'gdpr').  # fixed docstring typo 'dgpr'
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Fail fast on unsupported requirement types (idiomatic membership test
    # instead of the chained != comparisons).
    if requirement not in ('pci', 'gdpr'):
        raise WazuhException(1205, requirement)

    # Collect the distinct requirement values across every rule.
    req = list({req for rule in Rule.get_rules(limit=None)['items']
                for req in rule.to_dict()[requirement]})

    if search:
        req = search_array(req, search['value'], search['negation'])

    if sort:
        req = sort_array(req, order=sort['order'])
    else:
        req = sort_array(req)

    return {'items': cut_array(req, offset, limit), 'totalItems': len(req)}
def get_pci(offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Get all the PCI requirements used in the rules.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Deduplicate PCI requirements across every rule in a single comprehension.
    pci = {requirement for rule in Rule.get_rules(limit=0)['items']
           for requirement in rule.pci}

    if search:
        pci = search_array(pci, search['value'], search['negation'])

    pci = sort_array(pci, order=sort['order']) if sort else sort_array(pci)

    return {'items': cut_array(pci, offset, limit), 'totalItems': len(pci)}
def get_agent_conf(group_id=None, offset=0, limit=common.database_limit, filename='agent.conf', return_format=None):
    """
    Returns agent.conf as dictionary (or as raw XML text when requested).

    :return: agent.conf as dictionary.
    """
    agent_conf = os_path.join(common.shared_path,
                              group_id if group_id is not None else '',
                              filename)

    if not os_path.exists(agent_conf):
        raise WazuhException(1006, agent_conf)

    try:
        if filename == 'agent.conf' and return_format and return_format.lower() == 'xml':
            # Raw mode: return the file content with newlines stripped.
            with open(agent_conf, 'r') as raw_file:
                return raw_file.read().replace('\n', '')
        # Parse mode: load the XML and convert it to a JSON-friendly structure.
        data = _agentconf2json(load_wazuh_xml(agent_conf))
    except Exception as e:
        raise WazuhException(1101, str(e))

    return {'totalItems': len(data), 'items': cut_array(data, offset, limit)}
def get_agent_conf_multigroup(group_id=None, offset=0, limit=common.database_limit, filename=None):
    """
    Returns a multigroup's agent.conf as dictionary.

    :return: agent.conf as dictionary.
    """
    # NOTE(review): when group_id is falsy, agent_conf is never bound and the
    # exists() check below raises NameError — confirm callers always pass group_id.
    if group_id:
        conf_name = filename if filename else 'agent.conf'
        agent_conf = "{0}/{1}/{2}".format(common.multi_groups_path, group_id, conf_name)

    if not os_path.exists(agent_conf):
        raise WazuhException(1006, agent_conf)

    try:
        # Load the XML and convert it to a JSON-friendly structure.
        data = _agentconf2json(load_wazuh_xml(agent_conf))
    except Exception as e:
        raise WazuhException(1101, str(e))

    return {'totalItems': len(data), 'items': cut_array(data, offset, limit)}
def get_groups(offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Get all the groups used in the rules.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Deduplicate groups across every rule in a single comprehension.
    groups = {g for rule in Rule.get_rules(limit=None)['items'] for g in rule.groups}

    if search:
        groups = search_array(groups, search['value'], search['negation'])

    groups = sort_array(groups, order=sort['order']) if sort else sort_array(groups)

    return {'items': cut_array(groups, offset, limit), 'totalItems': len(groups)}
def get_group_files(group_id=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets the group files.

    :param group_id: Group ID.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    group_path = common.shared_path
    if group_id:
        if not group_exists(group_id):
            raise WazuhException(1710, group_id)
        group_path = "{0}/{1}".format(common.shared_path, group_id)

    if not path.exists(group_path):
        raise WazuhException(1006, group_path)

    try:
        data = []
        for entry in listdir(group_path):
            try:
                with open("{0}/{1}".format(group_path, entry), 'rb') as f:
                    data.append({'filename': entry,
                                 'hash': hashlib.md5(f.read()).hexdigest()})
            except (OSError, IOError):
                # Unreadable entries (e.g. subdirectories) are skipped on purpose.
                pass

        try:
            # ar.conf lives in the shared root, not in the group directory.
            # Fixed: the format string previously carried a stray unused argument.
            ar_path = "{0}/ar.conf".format(common.shared_path)
            with open(ar_path, 'rb') as f:
                data.append({'filename': "ar.conf",
                             'hash': hashlib.md5(f.read()).hexdigest()})
        except (OSError, IOError):
            # Best effort: a missing ar.conf is not an error.
            pass

        if search:
            data = search_array(data, search['value'], search['negation'])

        if sort:
            data = sort_array(data, sort['fields'], sort['order'])
        else:
            data = sort_array(data, ["filename"])

        return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
    except Exception as e:
        raise WazuhException(1727, str(e))
def ossec_log(type_log='all', category='all', months=3, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets logs from ossec.log.

    :param type_log: Filters by log type: all, error or info.
    :param category: Filters by log category (i.e. ossec-remoted).
    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    logs = []

    first_date = previous_month(months)
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"

    # Only the last 2000 lines of ossec.log are inspected.
    for line in tail(common.ossec_log, 2000):
        try:
            # Each log line is expected to start with a 'YYYY/mm/dd' date.
            log_date = datetime.strptime(line[:10], '%Y/%m/%d')
        except ValueError:
            # Lines without a leading date (e.g. continuations) are skipped.
            continue

        if log_date < first_date:
            continue

        if category != 'all':
            log_category = __get_ossec_log_category(line)
            if log_category:
                if log_category != category:
                    continue
            else:
                # Lines whose category cannot be determined are excluded.
                continue

        line = line.replace('\n', '')
        if type_log == 'all':
            logs.append(line)
        elif type_log == 'error' and "error:" in line.lower():
            # Collapse repeated statfs errors into a single placeholder entry.
            if "ERROR: statfs(" in line:
                if statfs_error in logs:
                    continue
                else:
                    logs.append(statfs_error)
            else:
                logs.append(line)
        elif type_log == 'info' and "error:" not in line.lower():
            logs.append(line)

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    if sort:
        logs = sort_array(logs, order=sort['order'])
    else:
        # Default: newest entries first.
        logs = sort_array(logs, order='desc')

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
def get_decoders_files(offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of the available decoder files.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    ossec_conf = configuration.get_ossec_conf()
    if 'rules' not in ossec_conf:
        raise WazuhException(1500)
    rules_conf = ossec_conf['rules']

    def _as_list(value):
        # Single configuration entries come back as a scalar; normalize to list.
        return value if type(value) is list else [value]

    decoder_dirs = _as_list(rules_conf['decoder_dir']) if 'decoder_dir' in rules_conf else []
    decoder_files = _as_list(rules_conf['decoder']) if 'decoder' in rules_conf else []

    data = []
    for decoder_dir in decoder_dirs:
        data.extend(glob("{0}/{1}/*_decoders.xml".format(common.ossec_path, decoder_dir)))
    for decoder_file in decoder_files:
        data.append("{0}/{1}".format(common.ossec_path, decoder_file))

    if search:
        data = search_array(data, search['value'], search['negation'])

    data = sort_array(data, order=sort['order']) if sort else sort_array(data, order='asc')

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
def get_rules_files(status=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of the rule files.

    :param status: Filters by status: enabled, disabled, all.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    status = Rule.__check_status(status)

    # Enabled rules come straight from the ossec.conf 'include' list.
    ossec_conf = configuration.get_ossec_conf()
    if 'rules' not in ossec_conf or 'include' not in ossec_conf['rules']:
        raise WazuhException(1200)
    data_enabled = ossec_conf['rules']['include']

    if status == Rule.S_ENABLED:
        data = [{'name': f, 'status': 'enabled'} for f in data_enabled]
    else:
        # Every *_rules.xml file on disk, reduced to its filename.
        all_names = [p.split('/')[-1]
                     for p in sorted(glob("{0}/*_rules.xml".format(common.rules_path)))]
        # Disabled = present on disk but not referenced in ossec.conf.
        data = [{'name': f, 'status': 'disabled'}
                for f in all_names if f not in data_enabled]
        if status == Rule.S_ALL:
            data.extend({'name': f, 'status': 'enabled'} for f in data_enabled)

    if search:
        data = search_array(data, search['value'], search['negation'])

    if sort:
        data = sort_array(data, sort['fields'], sort['order'])
    else:
        data = sort_array(data, ['name'], 'asc')

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
def get_rules(status=None, group=None, pci=None, file=None, id=None, level=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of rules.

    :param status: Filters by status: enabled, disabled, all.
    :param group: Filters by group.
    :param pci: Filters by pci requirement.
    :param file: Filters by file of the rule.
    :param id: Filters by rule ID.
    :param level: Filters by level. It can be an integer or a range (i.e. '2-4' that means levels from 2 to 4).
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    all_rules = []

    levels = None
    if level:
        levels = level.split('-')
        # Fixed: the previous check 'len(levels) < 0' could never be true;
        # a level filter must be a single level or a 'min-max' range.
        if not 1 <= len(levels) <= 2:
            raise WazuhException(1203)

    for rule_file in Rule.get_rules_files(status=status, limit=0)['items']:
        all_rules.extend(Rule.__load_rules_from_file(rule_file['name'], rule_file['status']))

    # Iterate over the full list while removing from a copy, so removal does
    # not disturb the iteration.
    rules = list(all_rules)
    for r in all_rules:
        if group and group not in r.groups:
            rules.remove(r)
        elif pci and pci not in r.pci:
            rules.remove(r)
        elif file and file != r.file:
            rules.remove(r)
        elif id and int(id) != r.id:
            rules.remove(r)
        elif level:
            if len(levels) == 1:
                if int(levels[0]) != r.level:
                    rules.remove(r)
            elif not (int(levels[0]) <= r.level <= int(levels[1])):
                rules.remove(r)

    if search:
        rules = search_array(rules, search['value'], search['negation'])

    if sort:
        rules = sort_array(rules, sort['fields'], sort['order'], Rule.SORT_FIELDS)
    else:
        rules = sort_array(rules, ['id'], 'asc')

    return {'items': cut_array(rules, offset, limit), 'totalItems': len(rules)}
def get_decoders(file=None, name=None, parents=False, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of available decoders.

    :param file: Filters by file.
    :param name: Filters by name.
    :param parents: Just parent decoders.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    all_decoders = []
    for decoder_file in Decoder.get_decoders_files(limit=0)['items']:
        all_decoders.extend(Decoder.__load_decoders_from_file(decoder_file))

    decoders = list(all_decoders)
    for d in all_decoders:
        # Fixed: skip to the next decoder once it has been removed. Previously
        # a decoder matching several filters was removed more than once, which
        # raises ValueError on the second list.remove() call.
        if file and file not in d.file:
            decoders.remove(d)
            continue
        if name and name != d.name:
            decoders.remove(d)
            continue
        if parents and 'parent' in d.details:
            decoders.remove(d)

    if search:
        decoders = search_array(decoders, search['value'], search['negation'])

    if sort:
        decoders = sort_array(decoders, sort['fields'], sort['order'], Decoder.SORT_FIELDS)
    else:
        decoders = sort_array(decoders, ['file', 'position'], 'asc')

    return {'items': cut_array(decoders, offset, limit), 'totalItems': len(decoders)}
def get_decoders_files(offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of the available decoder files.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    ossec_conf = configuration.get_ossec_conf()
    if 'rules' not in ossec_conf:
        raise WazuhException(1500)

    decoder_dirs, decoder_files = [], []
    # Both keys may hold a single entry or a list; normalize into the targets.
    for key, target in (('decoder_dir', decoder_dirs), ('decoder', decoder_files)):
        if key in ossec_conf['rules']:
            value = ossec_conf['rules'][key]
            if type(value) is list:
                target.extend(value)
            else:
                target.append(value)

    data = [found for directory in decoder_dirs
            for found in glob("{0}/{1}/*_decoders.xml".format(common.ossec_path, directory))]
    data += ["{0}/{1}".format(common.ossec_path, f) for f in decoder_files]

    if search:
        data = search_array(data, search['value'], search['negation'])

    if sort:
        data = sort_array(data, order=sort['order'])
    else:
        data = sort_array(data, order='asc')

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
def get_nodes_api(filter_node=None, filter_type=None, offset=0, limit=common.database_limit, sort=None, search=None, select=None):
    """
    Get cluster nodes, optionally filtered by node name and node type.

    :param filter_node: Return only the node with this exact name (returns a single dict, not a list).
    :param filter_type: Filters by node type ('client' or 'master').
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param select: Restricts returned fields. Format: {"fields":["name","version","type","ip"]}.
    :return: Dictionary {'items': [...], 'totalItems': N}, or a single node dict when filter_node matches.
    """
    request="get_nodes {}"
    nodes = __execute(request)

    valid_select_fiels = {"name", "version", "type", "ip"}
    valid_types = {"client", "master"}
    select_fields_param = {}

    if select:
        select_fields_param = set(select['fields'])
        # Reject selects that ask for fields outside the allowed set.
        if not select_fields_param.issubset(valid_select_fiels):
            incorrect_fields = select_fields_param - valid_select_fiels
            raise WazuhException(1724, "Allowed select fields: {0}. Fields {1}".\
                format(', '.join(list(valid_select_fiels)), ', '.join(incorrect_fields)))
    if filter_type:
        if not filter_type in valid_types:
            raise WazuhException(1728, "{0} is not valid. Allowed types: {1}.".format(filter_type, ', '.join(list(valid_types))))

    response = {"items":[], "totalItems":0}
    for node, data in nodes.items():
        # NOTE(review): 'not in' here does a substring check on filter_type; it
        # behaves like inequality only because filter_type was validated above
        # against exact type names — confirm '!=' was intended.
        if (filter_node and node != filter_node) or (filter_type and data['type'] not in filter_type):
            continue
        if select:
            # Keep only the requested fields.
            filtered_node = {}
            for field in select_fields_param:
                filtered_node.update({field:data[field]})
        else:
            filtered_node = data
        response["items"].append(filtered_node)

    # When filtering by name, return the single matching node (or 1730 if none).
    if filter_node:
        if len(response["items"]):
            return response["items"][0]
        else:
            raise WazuhException(1730, "{0}.".format(filter_node))

    if search:
        response["items"] = search_array(response['items'], search['value'], search['negation'], fields=['name','type','version','ip'])
    if sort:
        response["items"] = sort_array(response['items'], sort['fields'], sort['order'])

    # totalItems is computed before pagination so it reflects the full match count.
    response["totalItems"] = len(response["items"])

    if limit:
        response["items"] = cut_array(response["items"],int(offset),int(limit))
    return response
def get_agent_conf(group_id=None, offset=0, limit=common.database_limit, filename=None):
    """
    Returns agent.conf as dictionary.

    :return: agent.conf as dictionary.
    """
    # NOTE(review): when group_id is falsy, agent_conf is never bound and the
    # exists() check below raises NameError — confirm callers always pass group_id.
    if group_id:
        if not os_path.exists("{0}/{1}".format(common.shared_path, group_id)):
            raise WazuhException(1710, group_id)
        agent_conf = "{0}/{1}".format(common.shared_path, group_id)
        agent_conf += "/{0}".format(filename if filename else 'agent.conf')

    if not os_path.exists(agent_conf):
        raise WazuhException(1006, agent_conf)

    try:
        # Fixed: use a context manager so the file handle is always closed,
        # and catch a concrete Exception instead of a bare 'except:'.
        with open(agent_conf) as f:
            txt_data = f.read()
        # ' -- ' is invalid inside XML comments; neutralize it before parsing.
        txt_data = txt_data.replace(" -- ", " -INVALID_CHAR ")
        # agent.conf has no single root element; wrap it so it parses.
        txt_data = '<root_tag>' + txt_data + '</root_tag>'
        xml_data = fromstring(txt_data)
        # Parse XML to JSON
        data = _agentconf2json(xml_data)
    except Exception:
        raise WazuhException(1101)

    return {'totalItems': len(data), 'items': cut_array(data, offset, limit)}
def get_decoders(file=None, name=None, parents=False, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of available decoders.

    :param file: Filters by file.
    :param name: Filters by name.
    :param parents: Just parent decoders.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    all_decoders = []
    for decoder_file in Decoder.get_decoders_files(limit=0)['items']:
        all_decoders.extend(Decoder.__load_decoders_from_file(decoder_file))

    decoders = list(all_decoders)
    for d in all_decoders:
        # Fixed: skip to the next decoder once it has been removed. Previously
        # a decoder matching several filters was removed more than once, which
        # raises ValueError on the second list.remove() call.
        if file and file not in d.file:
            decoders.remove(d)
            continue
        if name and name != d.name:
            decoders.remove(d)
            continue
        if parents and 'parent' in d.details:
            decoders.remove(d)

    if search:
        decoders = search_array(decoders, search['value'], search['negation'])

    if sort:
        decoders = sort_array(decoders, sort['fields'], sort['order'], Decoder.SORT_FIELDS)
    else:
        decoders = sort_array(decoders, ['file', 'position'], 'asc')

    return {'items': cut_array(decoders, offset, limit), 'totalItems': len(decoders)}
def get_all_groups(offset=0, limit=common.database_limit, sort=None, search=None, hash_algorithm='md5'):
    """
    Gets the existing groups.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param hash_algorithm: hashlib algorithm name used to checksum group files.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    def get_hash(file, hash_algorithm='md5'):
        # Returns the hex digest of a file under the shared path, or None if unreadable.
        # NOTE(review): this inner default shadows the outer hash_algorithm
        # parameter, and the calls below never pass it — the outer parameter is
        # effectively unused. Confirm whether it should be forwarded.
        filename = "{0}/{1}".format(common.shared_path, file)

        # check hash algorithm
        try:
            algorithm_list = hashlib.algorithms_available
        except Exception as e:
            # Older Python versions expose 'algorithms' instead of 'algorithms_available'.
            algorithm_list = hashlib.algorithms

        if not hash_algorithm in algorithm_list:
            raise WazuhException(1723, "Available algorithms are {0}.".format(algorithm_list))

        hashing = hashlib.new(hash_algorithm)
        try:
            with open(filename, 'rb') as f:
                hashing.update(f.read())
        except IOError:
            # Missing/unreadable file: report no checksum rather than failing.
            return None
        return hashing.hexdigest()

    # Connect DB
    db_global = glob(common.database_path_global)
    if not db_global:
        raise WazuhException(1600)

    conn = Connection(db_global[0])
    query = "SELECT {0} FROM agent WHERE `group` = :group_id"

    # Group names: every directory under the shared path is a group.
    data = []
    for entry in listdir(common.shared_path):
        full_entry = path.join(common.shared_path, entry)
        if not path.isdir(full_entry):
            continue

        # Group count: number of agents assigned to this group.
        request = {'group_id': entry}
        conn.execute(query.format('COUNT(*)'), request)

        # merged.mg and agent.conf sum
        merged_sum = get_hash(entry + "/merged.mg")
        conf_sum = get_hash(entry + "/agent.conf")

        item = {'count': conn.fetch()[0], 'name': entry}

        # Checksums are included only when the files exist and are readable.
        if merged_sum:
            item['merged_sum'] = merged_sum

        if conf_sum:
            item['conf_sum'] = conf_sum

        data.append(item)

    if search:
        data = search_array(data, search['value'], search['negation'], fields=['name'])

    if sort:
        data = sort_array(data, sort['fields'], sort['order'])
    else:
        data = sort_array(data, ['name'])

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
def ossec_log(type_log='all', category='all', months=3, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets logs from ossec.log.

    :param type_log: Filters by log type: all, error or info.
    :param category: Filters by log category (i.e. ossec-remoted).
    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    logs = []

    first_date = previous_month(months)
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"

    # Only the last 2000 lines of ossec.log are inspected.
    for line in tail(common.ossec_log, 2000):
        log_fields = __get_ossec_log_fields(line)
        if log_fields:
            log_date, log_category, level, description = log_fields

            if log_date < first_date:
                continue

            if category != 'all':
                if log_category:
                    if log_category != category:
                        continue
                else:
                    continue

            log_line = {'timestamp': str(log_date), 'tag': log_category, 'level': level, 'description': description}
            if type_log == 'all':
                logs.append(log_line)
            elif type_log.lower() == level.lower():
                # Collapse repeated statfs errors into a single entry.
                # NOTE(review): statfs_error is a plain string appended into a
                # list of dicts; the timestamp-based sort below expects dicts —
                # confirm this branch's intended format.
                if "ERROR: statfs(" in line:
                    if statfs_error in logs:
                        continue
                    else:
                        logs.append(statfs_error)
                else:
                    logs.append(log_line)
            else:
                continue
        else:
            # Lines without parseable fields are continuations of the previous entry.
            if logs:
                logs[-1]['description'] += "\n" + line

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    if sort:
        if sort['fields']:
            logs = sort_array(logs, order=sort['order'], sort_by=sort['fields'])
        else:
            logs = sort_array(logs, order=sort['order'], sort_by=['timestamp'])
    else:
        # Default: newest entries first.
        logs = sort_array(logs, order='desc', sort_by=['timestamp'])

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
def get_decoders_files(status=None, path=None, file=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets a list of the available decoder files.

    :param status: Filters by status: enabled, disabled, all.
    :param path: Filters by path.
    :param file: Filters by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    status = Decoder.__check_status(status)

    ruleset_conf = configuration.get_ossec_conf(section='ruleset')
    if not ruleset_conf:
        raise WazuhException(1500)

    tmp_data = []
    exclude_filenames = []

    # Explicitly listed decoder files: included ones are enabled, excluded
    # ones are collected so directory scans can mark them disabled.
    for tag in ('decoder_include', 'decoder_exclude'):
        if tag not in ruleset_conf:
            continue
        entries = ruleset_conf[tag] if type(ruleset_conf[tag]) is list else [ruleset_conf[tag]]
        for entry in entries:
            entry_name = os.path.basename(entry)
            parent_dir = os.path.dirname(entry)
            entry_dir = os.path.relpath(parent_dir if parent_dir else common.ruleset_rules_path,
                                        start=common.ossec_path)
            if tag == 'decoder_exclude':
                exclude_filenames.append(entry_name)
            else:
                tmp_data.append({'file': entry_name, 'path': entry_dir, 'status': Decoder.S_ENABLED})

    # Decoder directories: every .xml inside is enabled unless excluded above.
    if 'decoder_dir' in ruleset_conf:
        dir_entries = ruleset_conf['decoder_dir'] if type(ruleset_conf['decoder_dir']) is list \
            else [ruleset_conf['decoder_dir']]
        for directory in dir_entries:
            for found in glob("{0}/{1}/*.xml".format(common.ossec_path, directory)):
                found_name = os.path.basename(found)
                tmp_data.append({
                    'file': found_name,
                    'path': os.path.relpath(os.path.dirname(found), start=common.ossec_path),
                    'status': Decoder.S_DISABLED if found_name in exclude_filenames else Decoder.S_ENABLED
                })

    # Apply the status/path/file filters in one pass.
    data = [d for d in tmp_data
            if not (status and status != 'all' and status != d['status'])
            and not (path and path != d['path'])
            and not (file and file != d['file'])]

    if search:
        data = search_array(data, search['value'], search['negation'])

    if sort:
        data = sort_array(data, sort['fields'], sort['order'])
    else:
        data = sort_array(data, ['file'], 'asc')

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
def get_decoders(offset=0, limit=common.database_limit, sort=None, search=None, filters=None, q=''):
    """
    Gets a list of available decoders.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
        This filter is used for filtering by 'status', 'path', 'file', 'name' and 'parents'.
    :param q: Defines query to filter.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Fixed: 'filters={}' was a mutable default argument; use None as sentinel.
    if filters is None:
        filters = {}
    # set default values to parameters
    status = filters.get('status', None)
    path = filters.get('path', None)
    file_ = filters.get('file', None)
    name = filters.get('name', None)
    parents = filters.get('parents', None)

    status = Decoder.__check_status(status)

    all_decoders = []
    for decoder_file in Decoder.get_decoders_files(status=status, limit=None)['items']:
        all_decoders.extend(Decoder.__load_decoders_from_file(decoder_file['file'], decoder_file['path'], decoder_file['status']))

    decoders = list(all_decoders)
    for d in all_decoders:
        if path and path != d.path:
            decoders.remove(d)
            continue
        if file_ and file_ != d.file:
            decoders.remove(d)
            continue
        if name and name != d.name:
            decoders.remove(d)
            continue
        if parents and 'parent' in d.details:
            decoders.remove(d)
            continue

    if search:
        decoders = search_array(decoders, search['value'], search['negation'])

    if q:
        # decoders contains a list of Decoder objects; cast them into dictionaries
        decoders = filter_array_by_query(q, [decoder.to_dict() for decoder in decoders])

    if sort:
        decoders = sort_array(decoders, sort['fields'], sort['order'], Decoder.SORT_FIELDS)
    else:
        decoders = sort_array(decoders, ['file', 'position'], 'asc')

    return {'items': cut_array(decoders, offset, limit), 'totalItems': len(decoders)}
def ossec_log(months=3, offset=0, limit=common.database_limit, sort=None, search=None, filters=None, q=''):
    """
    Gets logs from ossec.log.

    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
        This filter is used for filtering by 'type_log' (all, error or info) or 'category' (i.e. ossec-remoted).
    :param q: Defines query to filter.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Fixed: 'filters={}' was a mutable default argument; use None as sentinel.
    if filters is None:
        filters = {}
    # set default values to 'type_log' and 'category' parameters
    type_log = filters.get('type_log', 'all')
    category = filters.get('category', 'all')

    logs = []

    first_date = previous_month(months)
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"

    for line in tail(common.ossec_log, 2000):
        log_fields = __get_ossec_log_fields(line)
        if log_fields:
            log_date, log_category, level, description = log_fields

            if log_date < first_date:
                continue

            if category != 'all':
                if log_category:
                    if log_category != category:
                        continue
                else:
                    continue
            # We transform local time (ossec.log) to UTC maintaining time integrity and log format
            log_line = {'timestamp': log_date.astimezone(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'),
                        'tag': log_category, 'level': level, 'description': description}
            if type_log == 'all':
                logs.append(log_line)
            elif type_log.lower() == level.lower():
                # Collapse repeated statfs errors into a single entry.
                # NOTE(review): statfs_error is a plain string appended into a
                # list of dicts; the timestamp-based sort below expects dicts —
                # confirm this branch's intended format.
                if "ERROR: statfs(" in line:
                    if statfs_error in logs:
                        continue
                    else:
                        logs.append(statfs_error)
                else:
                    logs.append(log_line)
            else:
                continue
        else:
            # Continuation lines: append to the previous entry when tag/level match.
            # NOTE(review): log_category/level here come from the previous loop
            # iteration; if the very first line has no parseable fields, they are
            # unbound — confirm input always starts with a dated line.
            if logs and line and log_category == logs[-1]['tag'] and level == logs[-1]['level']:
                logs[-1]['description'] += "\n" + line

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    if q:
        logs = filter_array_by_query(q, logs)

    if sort:
        if sort['fields']:
            logs = sort_array(logs, order=sort['order'], sort_by=sort['fields'])
        else:
            logs = sort_array(logs, order=sort['order'], sort_by=['timestamp'])
    else:
        logs = sort_array(logs, order='desc', sort_by=['timestamp'])

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
def get_rules(offset=0, limit=common.database_limit, sort=None, search=None, filters=None, q=''):
    """
    Gets a list of rules.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
        This filter is used for filtering by 'status', 'group', 'pci', 'gpg13', 'gdpr', 'hipaa', 'nist-800-53', 'file', 'path', 'id' and 'level'.
    :param q: Defines query to filter.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Fixed: 'filters={}' was a mutable default argument; use None as sentinel.
    if filters is None:
        filters = {}
    # set default values to parameters
    status = filters.get('status', None)
    group = filters.get('group', None)
    pci = filters.get('pci', None)
    gpg13 = filters.get('gpg13', None)
    gdpr = filters.get('gdpr', None)
    hipaa = filters.get('hipaa', None)
    nist_800_53 = filters.get('nist-800-53', None)
    path = filters.get('path', None)
    file_ = filters.get('file', None)
    id_ = filters.get('id', None)
    level = filters.get('level', None)

    all_rules = []

    levels = None
    if level:
        levels = level.split('-')
        # Fixed: 'len(levels) < 0' could never be true; a level filter must be
        # a single level or a 'min-max' range.
        if not 1 <= len(levels) <= 2:
            raise WazuhException(1203)

    for rule_file in Rule.get_rules_files(status=status, limit=None)['items']:
        all_rules.extend(Rule.__load_rules_from_file(rule_file['file'], rule_file['path'], rule_file['status']))

    rules = list(all_rules)
    for r in all_rules:
        if group and group not in r.groups:
            rules.remove(r)
            continue
        elif pci and pci not in r.pci:
            rules.remove(r)
            continue
        elif gpg13 and gpg13 not in r.gpg13:
            rules.remove(r)
            continue
        elif gdpr and gdpr not in r.gdpr:
            rules.remove(r)
            continue
        elif hipaa and hipaa not in r.hipaa:
            rules.remove(r)
            continue
        elif nist_800_53 and nist_800_53 not in r.nist_800_53:
            rules.remove(r)
            continue
        elif path and path != r.path:
            rules.remove(r)
            continue
        elif file_ and file_ != r.file:
            rules.remove(r)
            continue
        elif id_ and int(id_) != r.id:
            rules.remove(r)
            continue
        elif level:
            if len(levels) == 1:
                if int(levels[0]) != r.level:
                    rules.remove(r)
                    continue
            elif not (int(levels[0]) <= r.level <= int(levels[1])):
                rules.remove(r)
                continue

    if search:
        rules = search_array(rules, search['value'], search['negation'])

    if q:
        # rules contains a list of Rule objects; cast them into dictionaries
        rules = filter_array_by_query(q, [rule.to_dict() for rule in rules])

    if sort:
        rules = sort_array(rules, sort['fields'], sort['order'], Rule.SORT_FIELDS)
    else:
        rules = sort_array(rules, ['id'], 'asc')

    return {'items': cut_array(rules, offset, limit), 'totalItems': len(rules)}
def get_connected_nodes(self, filter_node=None, offset=0, limit=common.database_limit, sort=None, search=None, select=None, filter_type='all') -> Dict:
    """
    Return all connected nodes, including the master node.

    :param filter_node: Node name or list of node names to keep; None keeps all.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param select: Restricts returned fields. Format: {"fields":[...]}; must be a subset of the node info keys.
    :param filter_type: Filters by node type: 'all', 'worker' or 'master'.
    :return: A dictionary containing data from each node
    """
    def return_node(node_info):
        # A node passes when it matches both the name filter and the type filter.
        return (filter_node is None or node_info['name'] in filter_node) and (filter_type == 'all' or node_info['type'] == filter_type)

    # The master's own info dict defines the set of selectable fields.
    default_fields = self.to_dict()['info'].keys()
    if select is None:
        select = {'fields': default_fields}
    else:
        if not set(select['fields']).issubset(default_fields):
            raise exception.WazuhException(1724, "Allowed fields: {}. Fields: {}".format(', '.join(default_fields), ', '.join(set(select['fields']) - default_fields)))

    if filter_type != 'all' and filter_type not in {'worker', 'master'}:
        raise exception.WazuhException(1728, "Valid types are 'worker' and 'master'.")

    if filter_node is not None:
        # Normalize to a set and verify every requested node is actually known
        # (connected clients plus this master node itself).
        filter_node = set(filter_node) if isinstance(filter_node, list) else {filter_node}
        if not filter_node.issubset(set(itertools.chain(self.clients.keys(), [self.configuration['node_name']]))):
            raise exception.WazuhException(1730)

    # Chain [self] first so the master node is always included in the scan.
    res = [val.to_dict()['info'] for val in itertools.chain([self], self.clients.values()) if return_node(val.to_dict()['info'])]

    if sort is not None:
        res = utils.sort_array(array=res, sort_by=sort['fields'], order=sort['order'], allowed_sort_fields=default_fields)
    if search is not None:
        res = utils.search_array(array=res, text=search['value'], negation=search['negation'])

    # totalItems reflects the filtered count; field selection and pagination
    # are applied only to the returned items.
    return {'totalItems': len(res), 'items': utils.cut_array([{k: v[k] for k in select['fields']} for v in res], offset, limit)}