Example #1
0
    def get_pci(offset=0, limit=common.database_limit, sort=None, search=None):
        """
        Get every distinct PCI requirement referenced by the loaded rules.

        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        # A set comprehension deduplicates PCI requirements across all rules.
        pci = {pci_item
               for rule in Rule.get_rules(limit=0)['items']
               for pci_item in rule.pci}

        if search:
            pci = search_array(pci, search['value'], search['negation'])

        pci = sort_array(pci, order=sort['order']) if sort else sort_array(pci)

        return {'items': cut_array(pci, offset, limit), 'totalItems': len(pci)}
Example #2
0
def ossec_log(type_log='all', category='all', months=3, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Gets logs from ossec.log.

    :param type_log: Filters by log type: all, error or info.
    :param category: Filters by log category (i.e. ossec-remoted).
    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"
    oldest_allowed = previous_month(months)
    logs = []

    for raw_line in tail(common.ossec_log, 2000):
        # Lines whose first 10 characters are not a YYYY/MM/DD date are skipped.
        try:
            entry_date = datetime.strptime(raw_line[:10], '%Y/%m/%d')
        except ValueError:
            continue

        if entry_date < oldest_allowed:
            continue

        if category != 'all':
            entry_category = __get_ossec_log_category(raw_line)
            # Discard lines with no recognizable category or a different one.
            if not entry_category or entry_category != category:
                continue

        entry = raw_line.replace('\n', '')
        is_error = "error:" in entry.lower()

        if type_log == 'all':
            logs.append(entry)
        elif type_log == 'error' and is_error:
            if "ERROR: statfs(" in entry:
                # Repeated statfs errors collapse into one sanitized entry.
                if statfs_error not in logs:
                    logs.append(statfs_error)
            else:
                logs.append(entry)
        elif type_log == 'info' and not is_error:
            logs.append(entry)

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    logs = sort_array(logs, order=sort['order']) if sort else sort_array(logs, order='desc')

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
Example #3
0
    def get_rules_files(status=None, offset=0, limit=common.database_limit, sort=None, search=None):
        """
        Gets a list of the rule files together with their enabled/disabled status.

        :param status: Filters by status: enabled, disabled, all.
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        status = Rule.__check_status(status)

        # The files listed under <rules><include> in ossec.conf are enabled.
        ossec_conf = configuration.get_ossec_conf()
        if 'rules' not in ossec_conf or 'include' not in ossec_conf['rules']:
            raise WazuhException(1200)
        enabled_files = ossec_conf['rules']['include']

        if status == Rule.S_ENABLED:
            data = [{'name': name, 'status': 'enabled'} for name in enabled_files]
        else:
            # Every *_rules.xml on disk that is not enabled counts as disabled.
            on_disk = [p.split('/')[-1]
                       for p in sorted(glob("{0}/*_rules.xml".format(common.rules_path)))]
            data = [{'name': name, 'status': 'disabled'}
                    for name in on_disk if name not in enabled_files]

            if status == Rule.S_ALL:
                data.extend({'name': name, 'status': 'enabled'} for name in enabled_files)

        if search:
            data = search_array(data, search['value'], search['negation'])

        if sort:
            data = sort_array(data, sort['fields'], sort['order'])
        else:
            data = sort_array(data, ['name'], 'asc')

        return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
Example #4
0
    def get_rules(status=None, group=None, pci=None, file=None, id=None, level=None, offset=0, limit=common.database_limit, sort=None, search=None):
        """
        Gets a list of rules.

        :param status: Filters by status: enabled, disabled, all.
        :param group: Filters by group.
        :param pci: Filters by pci requirement.
        :param file: Filters by file of the rule.
        :param id: Filters by rule ID.
        :param level: Filters by level. It can be an integer or a range (i.e. '2-4' that means levels from 2 to 4).
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        :raises WazuhException: Code 1203 when 'level' is not a single level or a 'min-max' range.
        """
        all_rules = []

        if level:
            levels = level.split('-')
            # A level filter is either a single value ('4') or a range ('2-4').
            # NOTE: the previous check was 'len(levels) < 0', which can never be
            # true since str.split() always returns at least one element.
            if len(levels) > 2:
                raise WazuhException(1203)

        for rule_file in Rule.get_rules_files(status=status, limit=0)['items']:
            all_rules.extend(Rule.__load_rules_from_file(rule_file['name'], rule_file['status']))

        # Iterate over a snapshot so rules can be removed from the result list.
        rules = list(all_rules)
        for r in all_rules:
            if group and group not in r.groups:
                rules.remove(r)
            elif pci and pci not in r.pci:
                rules.remove(r)
            elif file and file != r.file:
                rules.remove(r)
            elif id and int(id) != r.id:
                rules.remove(r)
            elif level:
                if len(levels) == 1:
                    if int(levels[0]) != r.level:
                        rules.remove(r)
                elif not (int(levels[0]) <= r.level <= int(levels[1])):
                    rules.remove(r)

        if search:
            rules = search_array(rules, search['value'], search['negation'])

        if sort:
            rules = sort_array(rules, sort['fields'], sort['order'], Rule.SORT_FIELDS)
        else:
            rules = sort_array(rules, ['id'], 'asc')

        return {'items': cut_array(rules, offset, limit), 'totalItems': len(rules)}
Example #5
0
    def get_decoders_files(offset=0, limit=common.database_limit, sort=None, search=None):
        """
        Gets a list of the available decoder files, as configured in ossec.conf.

        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        ossec_conf = configuration.get_ossec_conf()

        if 'rules' not in ossec_conf:
            raise WazuhException(1500)

        rules_conf = ossec_conf['rules']

        def as_list(value):
            # ossec.conf entries may come as one string or as a list of them.
            return value if type(value) is list else [value]

        decoder_dirs = as_list(rules_conf['decoder_dir']) if 'decoder_dir' in rules_conf else []
        decoder_files = as_list(rules_conf['decoder']) if 'decoder' in rules_conf else []

        data = []
        for decoder_dir in decoder_dirs:
            data.extend(glob("{0}/{1}/*_decoders.xml".format(common.ossec_path, decoder_dir)))
        data.extend("{0}/{1}".format(common.ossec_path, decoder_file)
                    for decoder_file in decoder_files)

        if search:
            data = search_array(data, search['value'], search['negation'])

        data = sort_array(data, order=sort['order']) if sort else sort_array(data, order='asc')

        return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
Example #6
0
    def get_decoders(file=None, name=None, parents=False, offset=0, limit=common.database_limit, sort=None, search=None):
        """
        Gets a list of available decoders.

        :param file: Filters by file.
        :param name: Filters by name.
        :param parents: Just parent decoders.
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        all_decoders = []

        for decoder_file in Decoder.get_decoders_files(limit=0)['items']:
            all_decoders.extend(Decoder.__load_decoders_from_file(decoder_file))

        decoders = list(all_decoders)
        for d in all_decoders:
            # BUGFIX: use an elif chain so each decoder is removed at most once.
            # The previous independent 'if' statements removed a decoder twice
            # when it failed two filters, raising ValueError on the second
            # list.remove() call.
            if file and file not in d.file:
                decoders.remove(d)
            elif name and name != d.name:
                decoders.remove(d)
            elif parents and 'parent' in d.details:
                decoders.remove(d)

        if search:
            decoders = search_array(decoders, search['value'], search['negation'])

        if sort:
            decoders = sort_array(decoders, sort['fields'], sort['order'], Decoder.SORT_FIELDS)
        else:
            decoders = sort_array(decoders, ['file', 'position'], 'asc')

        return {'items': cut_array(decoders, offset, limit), 'totalItems': len(decoders)}
Example #7
0
    def get_rules(offset=0, limit=common.database_limit, sort=None, search=None, filters=None, q=''):
        """
        Gets a list of rules.

        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
            This filter is used for filtering by 'status', 'group', 'pci', 'gpg13', 'gdpr', 'hipaa', 'nist-800-53',
            'file', 'path', 'id' and 'level'.
        :param q: Defines query to filter.

        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        :raises WazuhException: Code 1203 when 'level' is not a single level or a 'min-max' range.
        """
        # Avoid the mutable-default-argument pitfall: a '{}' default literal is
        # shared across every call to the function.
        if filters is None:
            filters = {}

        # set default values to parameters
        status = filters.get('status', None)
        group = filters.get('group', None)
        pci = filters.get('pci', None)
        gpg13 = filters.get('gpg13', None)
        gdpr = filters.get('gdpr', None)
        hipaa = filters.get('hipaa', None)
        nist_800_53 = filters.get('nist-800-53', None)
        path = filters.get('path', None)
        file_ = filters.get('file', None)
        id_ = filters.get('id', None)
        level = filters.get('level', None)

        all_rules = []

        if level:
            levels = level.split('-')
            # A level filter is either a single value ('4') or a range ('2-4').
            # NOTE: the previous check was 'len(levels) < 0', which can never be
            # true since str.split() always returns at least one element.
            if len(levels) > 2:
                raise WazuhException(1203)

        for rule_file in Rule.get_rules_files(status=status, limit=None)['items']:
            all_rules.extend(Rule.__load_rules_from_file(rule_file['file'], rule_file['path'], rule_file['status']))

        # Iterate over a snapshot so rules can be removed from the result list.
        rules = list(all_rules)
        for r in all_rules:
            if group and group not in r.groups:
                rules.remove(r)
                continue
            elif pci and pci not in r.pci:
                rules.remove(r)
                continue
            elif gpg13 and gpg13 not in r.gpg13:
                rules.remove(r)
                continue
            elif gdpr and gdpr not in r.gdpr:
                rules.remove(r)
                continue
            elif hipaa and hipaa not in r.hipaa:
                rules.remove(r)
                continue
            elif nist_800_53 and nist_800_53 not in r.nist_800_53:
                rules.remove(r)
                continue
            elif path and path != r.path:
                rules.remove(r)
                continue
            elif file_ and file_ != r.file:
                rules.remove(r)
                continue
            elif id_ and int(id_) != r.id:
                rules.remove(r)
                continue
            elif level:
                if len(levels) == 1:
                    if int(levels[0]) != r.level:
                        rules.remove(r)
                        continue
                elif not (int(levels[0]) <= r.level <= int(levels[1])):
                    rules.remove(r)
                    continue

        if search:
            rules = search_array(rules, search['value'], search['negation'])

        if q:
            # rules contains a list of Rule objects, it is necessary to cast it into dictionaries
            rules = filter_array_by_query(q, [rule.to_dict() for rule in rules])

        if sort:
            rules = sort_array(rules, sort['fields'], sort['order'], Rule.SORT_FIELDS)
        else:
            rules = sort_array(rules, ['id'], 'asc')

        return {'items': cut_array(rules, offset, limit), 'totalItems': len(rules)}
Example #8
0
    def get_decoders(offset=0,
                     limit=common.database_limit,
                     sort=None,
                     search=None,
                     filters=None,
                     q=''):
        """
        Gets a list of available decoders.

        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
            This filter is used for filtering by 'status', 'path', 'file', 'name' and 'parents'.
        :param q: Defines query to filter.

        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        # Avoid the mutable-default-argument pitfall: a '{}' default literal is
        # shared across every call to the function.
        if filters is None:
            filters = {}

        # set default values to parameters
        status = filters.get('status', None)
        path = filters.get('path', None)
        file_ = filters.get('file', None)
        name = filters.get('name', None)
        parents = filters.get('parents', None)

        status = Decoder.__check_status(status)
        all_decoders = []

        for decoder_file in Decoder.get_decoders_files(status=status,
                                                       limit=None)['items']:
            all_decoders.extend(
                Decoder.__load_decoders_from_file(decoder_file['file'],
                                                  decoder_file['path'],
                                                  decoder_file['status']))

        # Iterate over a snapshot so items can be removed from 'decoders'.
        decoders = list(all_decoders)
        for d in all_decoders:
            if path and path != d.path:
                decoders.remove(d)
                continue
            if file_ and file_ != d.file:
                decoders.remove(d)
                continue
            if name and name != d.name:
                decoders.remove(d)
                continue
            if parents and 'parent' in d.details:
                decoders.remove(d)
                continue

        if search:
            decoders = search_array(decoders, search['value'],
                                    search['negation'])

        if q:
            # decoders contains a list of Decoder objects, it is necessary to cast it into dictionaries
            decoders = filter_array_by_query(
                q, [decoder.to_dict() for decoder in decoders])

        if sort:
            decoders = sort_array(decoders, sort['fields'], sort['order'],
                                  Decoder.SORT_FIELDS)
        else:
            decoders = sort_array(decoders, ['file', 'position'], 'asc')

        return {
            'items': cut_array(decoders, offset, limit),
            'totalItems': len(decoders)
        }
Example #9
0
    def get_connected_nodes(self,
                            filter_node: str = None,
                            offset: int = 0,
                            limit: int = common.database_limit,
                            sort: Dict = None,
                            search: Dict = None,
                            select: Dict = None,
                            filter_type: str = 'all') -> Dict:
        """
        Return information about every connected node, including the master.

        :param filter_node: Node name (or list of names) to restrict the result to.
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sort specification: {"fields": [...], "order": "asc|desc"}.
        :param search: Search specification: {"value": ..., "negation": ...}.
        :param select: Fields to include; defaults to every node info field.
        :param filter_type: Node type filter: 'all', 'worker' or 'master'.
        :return: A dictionary containing data from each node
        """
        default_fields = self.to_dict()['info'].keys()

        if select is None:
            select = {'fields': default_fields}
        elif not set(select['fields']).issubset(default_fields):
            unknown_fields = set(select['fields']) - default_fields
            raise exception.WazuhException(
                1724, "Allowed fields: {}. Fields: {}".format(
                    ', '.join(default_fields), ', '.join(unknown_fields)))

        if filter_type not in {'all', 'worker', 'master'}:
            raise exception.WazuhException(
                1728, "Valid types are 'worker' and 'master'.")

        if filter_node is not None:
            # Normalize to a set of names and make sure every one is known.
            filter_node = set(filter_node) if isinstance(filter_node, list) else {filter_node}
            known_nodes = set(itertools.chain(self.clients.keys(),
                                              [self.configuration['node_name']]))
            if not filter_node.issubset(known_nodes):
                raise exception.WazuhException(1730)

        def keep(node_info: Dict) -> bool:
            """A node is kept when it passes both the name and type filters."""
            name_ok = filter_node is None or node_info['name'] in filter_node
            type_ok = filter_type == 'all' or node_info['type'] == filter_type
            return name_ok and type_ok

        res = [node.to_dict()['info']
               for node in itertools.chain([self], self.clients.values())
               if keep(node.to_dict()['info'])]

        if sort is not None:
            res = utils.sort_array(array=res,
                                   sort_by=sort['fields'],
                                   order=sort['order'],
                                   allowed_sort_fields=default_fields)
        if search is not None:
            res = utils.search_array(array=res,
                                     text=search['value'],
                                     negation=search['negation'])

        # Project each node dict down to the selected fields before paginating.
        projected = [{field: info[field] for field in select['fields']} for info in res]
        return {'totalItems': len(res),
                'items': utils.cut_array(projected, offset, limit)}
Example #10
0
def ossec_log(months=3, offset=0, limit=common.database_limit, sort=None, search=None, filters=None, q=''):
    """
    Gets logs from ossec.log.

    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
            This filter is used for filtering by 'type_log' (all, error or info) or 'category' (i.e. ossec-remoted).
    :param q: Defines query to filter.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Avoid the mutable-default-argument pitfall: a '{}' default literal is
    # shared across every call to the function.
    if filters is None:
        filters = {}

    # set default values to 'type_log' and 'category' parameters
    type_log = filters.get('type_log', 'all')
    category = filters.get('category', 'all')

    logs = []

    first_date = previous_month(months)
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"

    for line in tail(common.ossec_log, 2000):
        log_fields = __get_ossec_log_fields(line)
        if log_fields:
            log_date, log_category, level, description = log_fields

            if log_date < first_date:
                continue

            if category != 'all':
                if log_category:
                    if log_category != category:
                        continue
                else:
                    continue

            # We transform local time (ossec.log) to UTC maintaining time integrity and log format
            log_line = {'timestamp': log_date.astimezone(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'),
                        'tag': log_category, 'level': level, 'description': description}
            if type_log == 'all':
                logs.append(log_line)
            elif type_log.lower() == level.lower():
                if "ERROR: statfs(" in line:
                    # NOTE(review): this appends a plain string into a list of
                    # dicts; the continuation branch below indexes logs[-1]
                    # with a key and would fail on it — confirm intended.
                    if statfs_error in logs:
                        continue
                    else:
                        logs.append(statfs_error)
                else:
                    logs.append(log_line)
            else:
                continue
        else:
            # Unparsable lines are treated as continuations of the previous
            # entry, but only when they match its tag and level.
            if logs and line and log_category == logs[-1]['tag'] and level == logs[-1]['level']:
                logs[-1]['description'] += "\n" + line

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    if q:
        logs = filter_array_by_query(q, logs)

    if sort:
        if sort['fields']:
            logs = sort_array(logs, order=sort['order'], sort_by=sort['fields'])
        else:
            logs = sort_array(logs, order=sort['order'], sort_by=['timestamp'])
    else:
        logs = sort_array(logs, order='desc', sort_by=['timestamp'])

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
Example #11
0
def get_nodes_api(filter_node=None,
                  filter_type=None,
                  offset=0,
                  limit=common.database_limit,
                  sort=None,
                  search=None,
                  select=None):
    """
    Queries the cluster for its nodes and applies API-style filtering,
    selection, search, sort and pagination to the result.

    :param filter_node: Restricts the result to a single node; that node's
        data is returned directly instead of an items/totalItems dict.
    :param filter_type: Restricts results by node type ('worker' or 'master').
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param select: Fields to include in each node entry.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items} (or a single node dict).
    """
    nodes = execute("get_nodes {}")

    if nodes.get("err"):
        raise WazuhException(3016, "{}".format(nodes['err']))

    valid_select_fields = {"name", "version", "type", "ip"}
    valid_types = {"worker", "master"}
    selected_fields = {}

    if select:
        selected_fields = set(select['fields'])
        if not selected_fields.issubset(valid_select_fields):
            bad_fields = selected_fields - valid_select_fields
            raise WazuhException(1724, "Allowed select fields: {0}. Fields {1}".format(
                ', '.join(list(valid_select_fields)), ', '.join(bad_fields)))

    if filter_type and filter_type not in valid_types:
        raise WazuhException(1728, "{0} is not valid. Allowed types: {1}.".format(
            filter_type, ', '.join(list(valid_types))))

    response = {"items": [], "totalItems": 0}
    for node_name, node_data in nodes.items():
        if filter_node and node_name != filter_node:
            continue
        if filter_type and node_data['type'] not in filter_type:
            continue
        if select:
            response["items"].append({field: node_data[field] for field in selected_fields})
        else:
            response["items"].append(node_data)

    # When a single node was requested, return it directly (or fail).
    if filter_node:
        if response["items"]:
            return response["items"][0]
        raise WazuhException(1730, "{0}.".format(filter_node))

    if search:
        response["items"] = search_array(response['items'],
                                         search['value'],
                                         search['negation'],
                                         fields=['name', 'type', 'version', 'ip'])
    if sort:
        response["items"] = sort_array(response['items'], sort['fields'], sort['order'])

    response["totalItems"] = len(response["items"])

    if limit:
        response["items"] = cut_array(response["items"], int(offset), int(limit))

    return response
Example #12
0
def merge_results(responses, final_json, input_json):
    """
    Merge results from an API call.

    To do the merging process, the following is considered:
        1.- If the field is a list, append items to it
        2.- If the field is a message (msg), only replace it if the new message has more priority.
        3.- If the field is a integer:
            * if it's totalItems, sum
            * if it's an error, only replace it if its value is higher

    The priorities are defined in a list of tuples. The first item of the tuple is the element which has more priority.

    :param responses: list of results from each node
    :param final_json: JSON to return.
    :param input_json: Original API request; its 'arguments' (offset, limit, sort)
        are used to re-paginate the merged item list at the end.
    :return: single JSON with the final result
    """
    # Each tuple reads (higher-priority message, lower-priority message).
    priorities = {("Some agents were not restarted",
                   "All selected agents were restarted")}

    for local_json in responses:
        for key, field in local_json.items():
            # Exact type checks on purpose: the merge strategy depends on the
            # concrete JSON type of the value.
            field_type = type(field)
            if field_type == dict:
                # Recurse to merge nested dictionaries key by key.
                final_json[key] = merge_results(
                    [field], {} if key not in final_json else final_json[key],
                    input_json)
            elif field_type == list:
                if key in final_json:
                    # Extend with only the elements not already present
                    # (deduplicated union, preserving first-seen order).
                    final_json[key].extend([
                        elem for elem in field if elem not in final_json[key]
                    ])
                else:
                    final_json[key] = field
            elif field_type == int:
                if key in final_json:
                    if key == 'totalItems':
                        # Item totals from every node are added together.
                        final_json[key] += field
                    elif key == 'error' and final_json[key] < field:
                        # Keep the highest error code seen across nodes.
                        final_json[key] = field
                else:
                    final_json[key] = field
            else:  # str
                if key in final_json:
                    if (field, final_json[key]) in priorities:
                        # The incoming message outranks the stored one.
                        final_json[key] = field
                else:
                    final_json[key] = field

    # Re-apply sort and pagination to the merged item list: each node sorted
    # and paginated only its own partial result.
    if 'data' in final_json and 'items' in final_json['data'] and isinstance(
            final_json['data']['items'], list):
        if 'offset' not in input_json['arguments']:
            input_json['arguments']['offset'] = 0
        if 'limit' not in input_json['arguments']:
            input_json['arguments']['limit'] = common.database_limit

        if 'sort' in input_json['arguments']:
            final_json['data']['items'] = sort_array(
                final_json['data']['items'],
                input_json['arguments']['sort']['fields'],
                input_json['arguments']['sort']['order'])

        offset, limit = input_json['arguments']['offset'], input_json[
            'arguments']['limit']
        final_json['data']['items'] = final_json['data']['items'][
            offset:offset + limit]

    return final_json
Example #13
0
def ossec_log(type_log='all',
              category='all',
              months=3,
              offset=0,
              limit=common.database_limit,
              sort=None,
              search=None):
    """
    Gets logs from ossec.log.

    :param type_log: Filters by log type: all, error or info.
    :param category: Filters by log category (i.e. ossec-remoted).
    :param months: Returns logs of the last n months. By default is 3 months.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    statfs_error = "ERROR: statfs('******') produced error: No such file or directory"
    oldest_allowed = previous_month(months)
    logs = []

    for line in tail(common.ossec_log, 2000):
        parsed = __get_ossec_log_fields(line)

        if not parsed:
            # Unparsable lines continue the previous entry's description.
            if logs:
                logs[-1]['description'] += "\n" + line
            continue

        log_date, log_category, level, description = parsed

        if log_date < oldest_allowed:
            continue

        # Category filter: drop entries with no category or a different one.
        if category != 'all' and (not log_category or log_category != category):
            continue

        entry = {
            'timestamp': str(log_date),
            'tag': log_category,
            'level': level,
            'description': description
        }

        if type_log == 'all':
            logs.append(entry)
        elif type_log.lower() == level.lower():
            if "ERROR: statfs(" in line:
                # Repeated statfs errors collapse into one sanitized entry.
                if statfs_error not in logs:
                    logs.append(statfs_error)
            else:
                logs.append(entry)

    if search:
        logs = search_array(logs, search['value'], search['negation'])

    if sort:
        chosen_fields = sort['fields'] if sort['fields'] else ['timestamp']
        logs = sort_array(logs, order=sort['order'], sort_by=chosen_fields)
    else:
        logs = sort_array(logs, order='desc', sort_by=['timestamp'])

    return {'items': cut_array(logs, offset, limit), 'totalItems': len(logs)}
Example #14
0
    def get_rules(status=None,
                  group=None,
                  pci=None,
                  gdpr=None,
                  path=None,
                  file=None,
                  id=None,
                  level=None,
                  offset=0,
                  limit=common.database_limit,
                  sort=None,
                  search=None):
        """
        Gets a list of rules.

        :param status: Filters by status: enabled, disabled, all.
        :param group: Filters by group.
        :param pci: Filters by pci requirement.
        :param gdpr: Filter by gdpr requirement.
        :param file: Filters by file of the rule.
        :param path: Filters by file of the path.
        :param id: Filters by rule ID.
        :param level: Filters by level. It can be an integer or a range (i.e. '2-4' that means levels from 2 to 4).
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        :raises WazuhException: Code 1203 when 'level' is not a single level or a 'min-max' range.
        """
        all_rules = []

        if level:
            levels = level.split('-')
            # A level filter is either a single value ('4') or a range ('2-4').
            # NOTE: the previous check was 'len(levels) < 0', which can never be
            # true since str.split() always returns at least one element.
            if len(levels) > 2:
                raise WazuhException(1203)

        for rule_file in Rule.get_rules_files(status=status,
                                              limit=None)['items']:
            all_rules.extend(
                Rule.__load_rules_from_file(rule_file['file'],
                                            rule_file['path'],
                                            rule_file['status']))

        # Iterate over a snapshot so rules can be removed from the result list.
        rules = list(all_rules)
        for r in all_rules:
            if group and group not in r.groups:
                rules.remove(r)
                continue
            elif pci and pci not in r.pci:
                rules.remove(r)
                continue
            elif gdpr and gdpr not in r.gdpr:
                rules.remove(r)
                continue
            elif path and path != r.path:
                rules.remove(r)
                continue
            elif file and file != r.file:
                rules.remove(r)
                continue
            elif id and int(id) != r.id:
                rules.remove(r)
                continue
            elif level:
                if len(levels) == 1:
                    if int(levels[0]) != r.level:
                        rules.remove(r)
                        continue
                elif not (int(levels[0]) <= r.level <= int(levels[1])):
                    rules.remove(r)
                    continue

        if search:
            rules = search_array(rules, search['value'], search['negation'])

        if sort:
            rules = sort_array(rules, sort['fields'], sort['order'],
                               Rule.SORT_FIELDS)
        else:
            rules = sort_array(rules, ['id'], 'asc')

        return {
            'items': cut_array(rules, offset, limit),
            'totalItems': len(rules)
        }
Example #15
0
def get_all_groups(offset=0,
                   limit=common.database_limit,
                   sort=None,
                   search=None,
                   hash_algorithm='md5'):
    """
    Gets the existing groups.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :param hash_algorithm: Hash algorithm used for merged.mg and agent.conf sums (default 'md5').
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    def get_hash(file, hash_algorithm='md5'):
        """Return the hex digest of a file under the shared path, or None if unreadable."""
        filename = "{0}/{1}".format(common.shared_path, file)

        # check hash algorithm
        try:
            algorithm_list = hashlib.algorithms_available
        except Exception:
            # algorithms_available is missing on very old Python versions
            algorithm_list = hashlib.algorithms

        if hash_algorithm not in algorithm_list:
            raise WazuhException(
                1723, "Available algorithms are {0}.".format(algorithm_list))

        hashing = hashlib.new(hash_algorithm)

        try:
            with open(filename, 'rb') as f:
                hashing.update(f.read())
        except IOError:
            # Missing/unreadable file: the caller simply omits the sum
            return None

        return hashing.hexdigest()

    # Connect DB
    db_global = glob(common.database_path_global)
    if not db_global:
        raise WazuhException(1600)

    conn = Connection(db_global[0])
    query = "SELECT {0} FROM agent WHERE `group` = :group_id"

    # Group names: one entry per directory under the shared path
    data = []
    for entry in listdir(common.shared_path):
        full_entry = path.join(common.shared_path, entry)
        if not path.isdir(full_entry):
            continue

        # Group count
        request = {'group_id': entry}
        conn.execute(query.format('COUNT(*)'), request)

        # merged.mg and agent.conf sum.
        # Bug fix: forward the requested hash_algorithm; previously the inner
        # default ('md5') was always used, ignoring the caller's choice.
        merged_sum = get_hash(entry + "/merged.mg", hash_algorithm)
        conf_sum = get_hash(entry + "/agent.conf", hash_algorithm)

        item = {'count': conn.fetch()[0], 'name': entry}

        if merged_sum:
            item['merged_sum'] = merged_sum

        if conf_sum:
            item['conf_sum'] = conf_sum

        data.append(item)

    if search:
        data = search_array(data,
                            search['value'],
                            search['negation'],
                            fields=['name'])

    if sort:
        data = sort_array(data, sort['fields'], sort['order'])
    else:
        data = sort_array(data, ['name'])

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
Example #16
0
    def get_decoders_files(status=None,
                           path=None,
                           file=None,
                           offset=0,
                           limit=common.database_limit,
                           sort=None,
                           search=None):
        """
        Gets a list of the available decoder files.

        :param status: Filters by status: enabled, disabled, all.
        :param path: Filters by path.
        :param file: Filters by filename.
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """

        status = Decoder.__check_status(status)

        ruleset_conf = configuration.get_ossec_conf(section='ruleset')
        if not ruleset_conf:
            raise WazuhException(1500)

        def as_list(value):
            # ossec.conf entries may come as a single item or a list
            return value if isinstance(value, list) else [value]

        tmp_data = []
        exclude_filenames = []

        # Explicitly included/excluded decoder files
        for tag in ('decoder_include', 'decoder_exclude'):
            if tag not in ruleset_conf:
                continue

            item_status = Decoder.S_DISABLED if tag == 'decoder_exclude' else Decoder.S_ENABLED

            for item in as_list(ruleset_conf[tag]):
                if '/' in item:
                    item_split = item.split('/')
                    item_name = item_split[-1]
                    item_dir = "{0}/{1}".format(common.ossec_path,
                                                "/".join(item_split[:-1]))
                else:
                    item_name = item
                    item_dir = "{0}/{1}".format(common.ruleset_rules_path,
                                                item)

                if tag == 'decoder_exclude':
                    # Excluded names are filtered out of the decoder_dir scan below
                    exclude_filenames.append(item_name)
                else:
                    tmp_data.append({
                        'file': item_name,
                        'path': item_dir,
                        'status': item_status
                    })

        # Decoder directories: every *.xml not explicitly excluded
        if 'decoder_dir' in ruleset_conf:
            for conf_dir in as_list(ruleset_conf['decoder_dir']):
                pattern = "{0}/{1}/*.xml".format(common.ossec_path, conf_dir)

                for decoder_path in glob(pattern):
                    path_split = decoder_path.split('/')
                    decoder_name = path_split[-1]
                    decoder_status = (Decoder.S_DISABLED
                                      if decoder_name in exclude_filenames
                                      else Decoder.S_ENABLED)
                    tmp_data.append({
                        'file': decoder_name,
                        'path': "/".join(path_split[:-1]),
                        'status': decoder_status
                    })

        # Apply the status/path/file filters ('all' status matches everything)
        data = [d for d in tmp_data
                if not (status and status != 'all' and status != d['status'])
                and not (path and path != d['path'])
                and not (file and file != d['file'])]

        if search:
            data = search_array(data, search['value'], search['negation'])

        if sort:
            data = sort_array(data, sort['fields'], sort['order'])
        else:
            data = sort_array(data, ['file'], 'asc')

        return {
            'items': cut_array(data, offset, limit),
            'totalItems': len(data)
        }
Example #17
0
    def get_decoders(status=None,
                     path=None,
                     file=None,
                     name=None,
                     parents=False,
                     offset=0,
                     limit=common.database_limit,
                     sort=None,
                     search=None):
        """
        Gets a list of available decoders.

        :param status: Filters by status: enabled, disabled, all.
        :param path: Filters by path.
        :param file: Filters by file.
        :param name: Filters by name.
        :param parents: Just parent decoders.
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        status = Decoder.__check_status(status)

        # Load every decoder from each matching decoder file.
        all_decoders = []
        for decoder_file in Decoder.get_decoders_files(status=status,
                                                       limit=0)['items']:
            loaded = Decoder.__load_decoders_from_file(decoder_file['file'],
                                                       decoder_file['path'],
                                                       decoder_file['status'])
            all_decoders.extend(loaded)

        def keep(decoder):
            # A decoder survives only if it matches every requested filter.
            if path and path != decoder.path:
                return False
            if file and file != decoder.file:
                return False
            if name and name != decoder.name:
                return False
            if parents and 'parent' in decoder.details:
                return False
            return True

        decoders = [d for d in all_decoders if keep(d)]

        if search:
            decoders = search_array(decoders, search['value'],
                                    search['negation'])

        if sort:
            decoders = sort_array(decoders, sort['fields'], sort['order'],
                                  Decoder.SORT_FIELDS)
        else:
            decoders = sort_array(decoders, ['file', 'position'], 'asc')

        return {
            'items': cut_array(decoders, offset, limit),
            'totalItems': len(decoders)
        }