Example #1
def files(agent_id=None, event=None, filename=None, filetype='file', md5=None, sha1=None, hash=None, summary=False, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Return a list of files from the database that match the filters

    :param agent_id: Agent ID.
    :param event: Filters by event: added, readded, modified, deleted.
    :param filename: Filters by filename.
    :param filetype: Filters by filetype: file or registry.
    :param md5: Filters by md5 hash.
    :param sha1: Filters by sha1 hash.
    :param hash: Filters by md5 or sha1 hash.
    :param summary: Returns a summary grouping by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """

    # Connection
    db_agent = glob('{0}/{1}-*.db'.format(common.database_path_agents, agent_id))
    if not db_agent:
        raise WazuhException(1600)
    else:
        db_agent = db_agent[0]

    conn = Connection(db_agent)

    agent_os = Agent(agent_id).get_basic_information()['os']

    if "windows" in agent_os.lower():
        windows_agent = True
    else:
        windows_agent = False

    fields = {'scanDate': 'date', 'modificationDate': 'mtime', 'file': 'path', 'size': 'size', 'user': 'uname', 'group': 'gname'}

    # Query
    query = "SELECT {0} FROM fim_event, fim_file WHERE fim_event.id_file = fim_file.id AND fim_file.type = :filetype"
    request = {'filetype': filetype}

    if event:
        query += ' AND fim_event.type = :event'
        request['event'] = event

    if filename:
        query += ' AND path = :filename'
        request['filename'] = filename

    if md5:
        query += ' AND md5 = :md5'
        request['md5'] = md5

    if sha1:
        query += ' AND sha1 = :sha1'
        request['sha1'] = sha1

    if hash:
        query += ' AND (md5 = :hash OR sha1 = :hash)'
        request['hash'] = hash

    if search:
        query += " AND NOT" if bool(search['negation']) else ' AND'
        query += " (" + " OR ".join(x + ' LIKE :search' for x in ('path', "date", 'size', 'md5', 'sha1', 'uname', 'gname', 'inode', 'perm')) + " )"
        request['search'] = '%{0}%'.format(search['value'])

    # Total items
    if summary:
        query += ' group by path'
        conn.execute("SELECT COUNT(*) FROM ({0}) AS TEMP".format(query.format("max(date)")), request)
    else:
        conn.execute(query.format('COUNT(*)'), request)

    data = {'totalItems': conn.fetch()[0]}

    # Sorting
    if sort:
        allowed_sort_fields = fields.keys()
        for sf in sort['fields']:
            if sf not in allowed_sort_fields:
                raise WazuhException(1403, 'Allowed sort fields: {0}. Field: {1}'.format(allowed_sort_fields, sf))

        query += ' ORDER BY ' + ','.join(['{0} {1}'.format(fields[i], sort['order']) for i in sort['fields']])
    else:
        query += ' ORDER BY date DESC'

    query += ' LIMIT :offset,:limit'
    request['offset'] = offset
    request['limit'] = limit

    if summary:
        select = ["max(date)", "mtime", "fim_event.type", "path"]
    else:
        select = ["date", "mtime", "fim_event.type", "path", "size", "perm", "uid", "gid", "md5", "sha1", "uname", "gname", "inode"]

    conn.execute(query.format(','.join(select)), request)

    data['items'] = []

    for tuple in conn:
        data_tuple = {}

        if tuple[0] != None:
            data_tuple['scanDate'] = tuple[0]
        if tuple[1] != None:
            data_tuple['modificationDate'] = tuple[1]  # modificationDate
        else:
            data_tuple['modificationDate'] = tuple[0]  # scanDate
        if tuple[2] != None:
            data_tuple['event'] = tuple[2]
        if tuple[3] != None:
            data_tuple['file'] = tuple[3]

        if not summary:
            try:
                permissions = filemode(int(tuple[5], 8))
            except TypeError:
                permissions = None

            if tuple[4] != None:
                data_tuple['size'] = tuple[4]
            if tuple[8] != None:
                data_tuple['md5'] = tuple[8]
            if tuple[9] != None:
                data_tuple['sha1'] = tuple[9]
            if tuple[12] != None:
                data_tuple['inode'] = tuple[12]

            if not windows_agent:
                if tuple[6] != None:
                    data_tuple['uid'] = tuple[6]
                if tuple[7] != None:
                    data_tuple['gid'] = tuple[7]

                if tuple[10] != None:
                    data_tuple['user'] = tuple[10]
                if tuple[11] != None:
                    data_tuple['group'] = tuple[11]

                if tuple[5] != None:
                    data_tuple['octalMode'] = tuple[5]
                if permissions:
                    data_tuple['permissions'] = permissions


        data['items'].append(data_tuple)

    return data
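
Example #1 builds its SQL incrementally: each optional filter appends an AND clause with a named placeholder and stores the value in the request dictionary, which is then reused for both the COUNT(*) query and the paginated data query. Below is a minimal, self-contained sketch of that pattern against an in-memory SQLite table; the table and values are illustrative, not the real Wazuh schema.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE fim_file (path TEXT, md5 TEXT)")
conn.execute("INSERT INTO fim_file VALUES ('/etc/passwd', 'abc'), ('/etc/hosts', 'def')")

query = "SELECT {0} FROM fim_file WHERE 1=1"
request = {}
filename = '/etc/passwd'
if filename:
    query += " AND path = :filename"
    request['filename'] = filename

# total items, counted before the limit is applied
total = conn.execute(query.format("COUNT(*)"), request).fetchone()[0]

# one page of results, using the same named-parameter LIMIT as the code above
query += " LIMIT :offset,:limit"
request.update({'offset': 0, 'limit': 10})
items = conn.execute(query.format("path, md5"), request).fetchall()
print(total, items)   # 1 [('/etc/passwd', 'abc')]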
Example #2
def check_sort_fields(allowed_sort_fields, sort_by):
    # Check if every element in sort['fields'] is in allowed_sort_fields
    if not sort_by.issubset(allowed_sort_fields):
        incorrect_fields = ', '.join(sort_by - allowed_sort_fields)
        raise WazuhException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(', '.join(allowed_sort_fields), incorrect_fields))
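
This check assumes allowed_sort_fields and sort_by are already sets. A minimal sketch of the same validation with plain Python sets (the field names are made up):

allowed_sort_fields = {'date', 'mtime', 'file'}
sort_by = {'file', 'size'}

if not sort_by.issubset(allowed_sort_fields):
    incorrect_fields = ', '.join(sort_by - allowed_sort_fields)
    print('Allowed sort fields: {0}. Fields: {1}'.format(', '.join(allowed_sort_fields), incorrect_fields))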
Example #3
        txt_data = '<root_tag>' + txt_data + '</root_tag>'

        conf = ET.fromstring(txt_data)

        return conf.find('ossec_config').find('cluster').find(
            'disabled').text == 'no'
    except:
        return False


# import python-cryptography lib only if cluster is enabled
if check_cluster_status():
    try:
        from cryptography.fernet import Fernet, InvalidToken, InvalidSignature
    except ImportError as e:
        raise WazuhException(3008, str(e))


class WazuhClusterClient(asynchat.async_chat):
    def __init__(self, host, port, key, data, file):
        asynchat.async_chat.__init__(self)
        self.can_read = False
        self.can_write = True
        self.received_data = []
        self.response = ""
        self.f = key
        self.data = data
        self.file = file
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.settimeout(common.cluster_timeout)
        try:
Example #4
Wazuh is a python package to manage OSSEC.

"""

__version__ = '3.9.0'

msg = "\n\nPython 2.7 or newer not found."
msg += "\nUpdate it or set the path to a valid version. Example:"
msg += "\n  export PATH=$PATH:/opt/rh/python27/root/usr/bin"
msg += "\n  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/rh/python27/root/usr/lib64"

try:
    from sys import version_info as python_version
    if python_version.major < 2 or (python_version.major == 2
                                    and python_version.minor < 7):
        raise WazuhException(999, msg)
except Exception as e:
    raise WazuhException(999, msg)


class Wazuh:
    """
    Basic class to set up OSSEC directories
    """

    OSSEC_INIT = '/etc/ossec-init.conf'

    def __init__(self, ossec_path='/var/ossec', get_init=False):
        """
        Initialize basic information and directories.
        :param ossec_path: OSSEC Path. By default it is /var/ossec.
Example #5
def files(agent_id=None,
          summary=False,
          offset=0,
          limit=common.database_limit,
          sort=None,
          search=None,
          select=None,
          filters={}):
    """
    Return a list of files from the database that match the filters

    :param agent_id: Agent ID.
    :param filters: Fields to filter by
    :param summary: Returns a summary grouping by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    parameters = {
        "date", "mtime", "file", "size", "perm", "uname", "gname", "md5",
        "sha1", "sha256", "inode", "gid", "uid", "type", "attributes",
        "symbolic_path"
    }
    summary_parameters = {"date", "mtime", "file"}

    if select is None:
        select = summary_parameters if summary else parameters
    else:
        select = set(select['fields'])
        if not select.issubset(parameters):
            raise WazuhException(
                1724, "Allowed select fields: {0}. Fields: {1}.".format(
                    ', '.join(parameters), ','.join(select - parameters)))

    if 'hash' in filters:
        or_filters = {
            'md5': filters['hash'],
            'sha1': filters['hash'],
            'sha256': filters['hash']
        }
        del filters['hash']
    else:
        or_filters = {}

    items, totalItems = Agent(agent_id)._load_info_from_agent_db(
        table='fim_entry',
        select=select,
        offset=offset,
        limit=limit,
        sort=sort,
        search=search,
        filters=filters,
        count=True,
        or_filters=or_filters)
    for date_field in select & {'mtime', 'date'}:
        for item in items:
            # date fields with value 0 are returned as ND
            item[date_field] = "ND" if item[date_field] == 0 \
                                    else datetime.utcfromtimestamp(float(item[date_field])).strftime('%Y-%m-%d %H:%M:%S')

    return {'totalItems': totalItems, 'items': items}
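
The only post-processing here is on the two epoch date fields, where 0 means "no date". A small sketch of that conversion, assuming the items have already been fetched from the agent database (the values are invented):

from datetime import datetime

items = [{'file': '/etc/hosts', 'mtime': 0, 'date': 1546300800}]
for date_field in ('mtime', 'date'):
    for item in items:
        item[date_field] = "ND" if item[date_field] == 0 \
            else datetime.utcfromtimestamp(float(item[date_field])).strftime('%Y-%m-%d %H:%M:%S')
print(items)   # [{'file': '/etc/hosts', 'mtime': 'ND', 'date': '2019-01-01 00:00:00'}]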
Example #6
    def send_msg_to_agent(self, msg, agent_id=None, msg_type=None):
        # Active-response
        #   Agents: /var/ossec/queue/alerts/ar
        #     - Existing command:
        #       - (msg_to_agent) [] NNS 001 restart-ossec0 arg1 arg2 arg3
        #       - (msg_to_agent) [] ANN (null) restart-ossec0 arg1 arg2 arg3
        #     - Custom command:
        #       - (msg_to_agent) [] NNS 001 !test.sh arg1 arg2 arg3
        #       - (msg_to_agent) [] ANN (null) !test.sh arg1 arg2 arg3
        #   Manager: /var/ossec/queue/alerts/execq
        #     - Existing command:
        #       - restart-ossec0 arg1 arg2 arg3
        #     - Custom command:
        #       - !test.sh Hello World

        # Build message
        ALL_AGENTS_C = 'A'
        NONE_C = 'N'
        SPECIFIC_AGENT_C = 'S'
        NO_AR_C = '!'

        if agent_id:
            str_all_agents = NONE_C
            str_agent = SPECIFIC_AGENT_C
            str_agent_id = agent_id
        else:
            str_all_agents = ALL_AGENTS_C
            str_agent = NONE_C
            str_agent_id = "(null)"

        # AR
        if msg_type == OssecQueue.AR_TYPE:

            if agent_id != "000":
                # Example restart 'msg': restart-ossec0 - null (from_the_server) (no_rule_id)
                socket_msg = "{0} {1}{2}{3} {4} {5}".format(
                    "(msg_to_agent) []", str_all_agents, NONE_C, str_agent,
                    str_agent_id, msg)
            elif agent_id == "000":
                socket_msg = msg

            # Send message
            try:
                self._send(socket_msg.encode())
            except:
                raise WazuhException(1652)

            return "Command sent."

        # Legacy: Restart syscheck, restart agents
        else:
            if msg == OssecQueue.HC_SK_RESTART:
                socket_msg = "{0} {1}{2}{3} {4} {5}".format(
                    "(msg_to_agent) []", str_all_agents, NO_AR_C, str_agent,
                    str_agent_id, OssecQueue.HC_SK_RESTART)
            elif msg == OssecQueue.RESTART_AGENTS:
                socket_msg = "{0} {1}{2}{3} {4} {5} - {6} (from_the_server) (no_rule_id)".format(
                    "(msg_to_agent) []", str_all_agents, NONE_C, str_agent,
                    str_agent_id, OssecQueue.RESTART_AGENTS, "null")
            else:
                raise WazuhException(1012, msg)

            # Send message
            try:
                self._send(socket_msg.encode())
            except:
                if msg == OssecQueue.HC_SK_RESTART:
                    if agent_id:
                        raise WazuhException(1601, "on agent")
                    else:
                        raise WazuhException(1601, "on all agents")
                elif msg == OssecQueue.RESTART_AGENTS:
                    raise WazuhException(1702)

            # Return message
            if msg == OssecQueue.HC_SK_RESTART:
                return "Restarting Syscheck/Rootcheck on agent" if agent_id else "Restarting Syscheck/Rootcheck on all agents"
            elif msg == OssecQueue.RESTART_AGENTS:
                return "Restarting agent" if agent_id else "Restarting all agents"
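
A short sketch of how the active-response header is assembled for a specific agent versus all agents; the flag constants are copied from the method, while the command string is illustrative:

ALL_AGENTS_C, NONE_C, SPECIFIC_AGENT_C = 'A', 'N', 'S'
agent_id = "001"
msg = "restart-ossec0 - null (from_the_server) (no_rule_id)"

str_all_agents, str_agent, str_agent_id = (NONE_C, SPECIFIC_AGENT_C, agent_id) if agent_id \
    else (ALL_AGENTS_C, NONE_C, "(null)")
socket_msg = "{0} {1}{2}{3} {4} {5}".format("(msg_to_agent) []", str_all_agents, NONE_C,
                                            str_agent, str_agent_id, msg)
print(socket_msg)   # (msg_to_agent) [] NNS 001 restart-ossec0 - null (from_the_server) (no_rule_id)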
Example #7
def validation():
    """
    Check if Wazuh configuration is OK.

    :return: Confirmation message.
    """
    lock_file = open(execq_lockfile, 'a+')
    fcntl.lockf(lock_file, fcntl.LOCK_EX)
    try:
        # sockets path
        api_socket_path = join(common.ossec_path, 'queue/alerts/execa')
        execq_socket_path = common.EXECQ
        # msg for checking Wazuh configuration
        execq_msg = 'check-manager-configuration '

        # remove api_socket if exists
        try:
            remove(api_socket_path)
        except OSError as e:
            if exists(api_socket_path):
                raise WazuhException(1014, str(e))

        # up API socket
        try:
            api_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
            api_socket.bind(api_socket_path)
            # timeout
            api_socket.settimeout(5)
        except socket.error:
            raise WazuhException(1013)

        # connect to execq socket
        if exists(execq_socket_path):
            try:
                execq_socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
                execq_socket.connect(execq_socket_path)
            except socket.error:
                raise WazuhException(1013)
        else:
            raise WazuhException(1901)

        # send msg to execq socket
        try:
            execq_socket.send(execq_msg.encode())
            execq_socket.close()
        except socket.error as e:
            raise WazuhException(1014, str(e))
        finally:
            execq_socket.close()

        # if api_socket receives a message, configuration is OK
        try:
            buffer = bytearray()
            # receive data
            datagram = api_socket.recv(4096)
            buffer.extend(datagram)
        except socket.timeout as e:
            raise WazuhException(1014, str(e))
        finally:
            api_socket.close()
            # remove api_socket
            if exists(api_socket_path):
                remove(api_socket_path)

        try:
            response = _parse_execd_output(buffer.decode('utf-8').rstrip('\0'))
        except (KeyError, json.decoder.JSONDecodeError) as e:
            raise WazuhException(1904, str(e))
    finally:
        fcntl.lockf(lock_file, fcntl.LOCK_UN)
        lock_file.close()

    return response
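
The configuration check is a datagram round trip over UNIX sockets: bind a receiving socket, send the request to execd, and wait (with a timeout) for the answer. A self-contained sketch of that round trip in which a second socket stands in for execd and the paths are temporary instead of the real queue/alerts ones:

import os
import socket
import tempfile

reply_path = os.path.join(tempfile.mkdtemp(), 'execa')

# socket that will receive the answer (the role of api_socket above)
receiver = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
receiver.bind(reply_path)
receiver.settimeout(5)

# stand-in for execd replying on the bound path
sender = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
sender.sendto(b'{"error": 0, "message": "ok"}', reply_path)
sender.close()

print(receiver.recv(4096))   # b'{"error": 0, "message": "ok"}'
receiver.close()
os.remove(reply_path)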
Example #8
def totals(year, month, day):
    """
    Returns the totals file.

    :param year: Year in YYYY format, e.g. 2016
    :param month: Month in number or 3 first letters, e.g. Feb or 2
    :param day: Day, e.g. 9
    :return: Array of dictionaries. Each dictionary represents an hour.
    """

    try:
        year = int(year)
        day = int(day)

        if year < 0 or day < 0 or day > 31:
            raise WazuhException(1307)

        day = "%02d" % day
    except ValueError:
        raise WazuhException(1307)

    if month not in MONTHS:
        try:
            index = int(month)
        except ValueError:
            raise WazuhException(1307)

        if index < 1 or index > 12:
            raise WazuhException(1307)

        try:
            month = MONTHS[index - 1]
        except IndexError:
            raise WazuhException(1307)

    try:
        stat_filename = common.stats_path + "/totals/" + str(
            year) + '/' + month + "/ossec-totals-" + day + ".log"
        stats = open(stat_filename, 'r')
    except IOError:
        raise WazuhException(1308, stat_filename)

    response = []
    alerts = []

    for line in stats:
        data = line.split('-')

        if len(data) == 4:
            hour = int(data[0])
            sigid = int(data[1])
            level = int(data[2])
            times = int(data[3])

            alert = {'sigid': sigid, 'level': level, 'times': times}
            alerts.append(alert)
        else:
            data = line.split('--')

            if len(data) != 5:
                if len(data) in (0, 1):
                    continue
                else:
                    raise WazuhException(1309)

            hour = int(data[0])
            total_alerts = int(data[1])
            events = int(data[2])
            syscheck = int(data[3])
            firewall = int(data[4])

            response.append({
                'hour': hour,
                'alerts': alerts,
                'totalAlerts': total_alerts,
                'events': events,
                'syscheck': syscheck,
                'firewall': firewall
            })
            alerts = []

    return response
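
The parsing handles two line formats, inferred from the code itself: "hour-sigid-level-times" for per-signature counters and "hour--alerts--events--syscheck--firewall" for the hourly summary. A sketch with invented numbers:

sig_line = "5-1002-2-10"          # hour-sigid-level-times
hour_line = "5--50--200--30--4"   # hour--totalAlerts--events--syscheck--firewall

data = sig_line.split('-')
if len(data) == 4:
    print({'sigid': int(data[1]), 'level': int(data[2]), 'times': int(data[3])})

data = hour_line.split('--')
print({'hour': int(data[0]), 'totalAlerts': int(data[1]), 'events': int(data[2]),
       'syscheck': int(data[3]), 'firewall': int(data[4])})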
Example #9
    print(msg1)

    if more:
        print(msg2)

#
# Main
#
if __name__ == '__main__':

    # Validate cluster config
    cluster_config = None
    try:
        cluster_config = read_config()
        if 'node_type' not in cluster_config or (cluster_config['node_type'] != 'master' and cluster_config['node_type'] != 'worker'):
            raise WazuhException(3004, 'Invalid node type {0}. Correct values are master and worker'.format(cluster_config['node_type']))
    except WazuhException as e:
        print("Invalid configuration: '{0}'".format(str(e)))
        exit(1)

    # Get cluster config
    is_master = cluster_config['node_type'] == "master"
    # get arguments
    parser = get_parser()
    args = parser.parse_args()

    if args.debug:
        logging.getLogger('').setLevel(logging.DEBUG) #10

    try:
        if args.filter_status and not args.list_agents:
Example #10
def get_all_groups_sql(offset=0,
                       limit=common.database_limit,
                       sort=None,
                       search=None):
    """
    Gets the existing groups.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """

    # Connect DB
    db_global = glob(common.database_path_global)
    if not db_global:
        raise WazuhException(1600)

    conn = Connection(db_global[0])

    # Init query
    query = "SELECT DISTINCT {0} FROM agent WHERE `group` IS NOT null"
    fields = {'name': 'group'}  # field: db_column
    select = ["`group`"]
    request = {}

    # Search
    if search:
        query += " AND NOT" if bool(search['negation']) else ' AND'
        query += " ( `group` LIKE :search )"
        request['search'] = '%{0}%'.format(search['value'])

    # Count
    conn.execute(query.format('COUNT(DISTINCT `group`)'), request)
    data = {'totalItems': conn.fetch()[0]}

    # Sorting
    if sort:
        if sort['fields']:
            allowed_sort_fields = fields.keys()
            # Check if every element in sort['fields'] is in allowed_sort_fields.
            if not set(sort['fields']).issubset(allowed_sort_fields):
                raise WazuhException(
                    1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                        allowed_sort_fields, sort['fields']))

            order_str_fields = [
                '`{0}` {1}'.format(fields[i], sort['order'])
                for i in sort['fields']
            ]
            query += ' ORDER BY ' + ','.join(order_str_fields)
        else:
            query += ' ORDER BY `group` {0}'.format(sort['order'])
    else:
        query += ' ORDER BY `group` ASC'

    # OFFSET - LIMIT
    if limit:
        query += ' LIMIT :offset,:limit'
        request['offset'] = offset
        request['limit'] = limit

    # Data query
    conn.execute(query.format(','.join(select)), request)

    data['items'] = []

    for tuple in conn:
        if tuple[0] != None:
            data['items'].append(tuple[0])

    return data
Example #11
def get_sca_checks(policy_id,
                   agent_id=None,
                   q="",
                   offset=0,
                   limit=common.database_limit,
                   sort=None,
                   search=None,
                   select=None,
                   filters={}):
    """
    Gets a list of checks analyzed for a policy.
    :param policy_id: policy id to get the checks from
    :param agent_id: agent id to get the policies from
    :param q: Defines query to filter in DB.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string. Format: {"fields": ["field1","field2"]}
    :param select: Select fields to return. Format: {"fields":["field1","field2"]}.
    :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}

    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    fields_translation = {
        **fields_translation_sca_check,
        **fields_translation_sca_check_compliance,
        **fields_translation_sca_check_rule
    }

    full_select = {
        'fields': (list(fields_translation_sca_check.keys()) +
                   list(fields_translation_sca_check_compliance.keys()) +
                   list(fields_translation_sca_check_rule.keys()))
    }

    db_query = WazuhDBQuerySCA(agent_id=agent_id,
                               offset=offset,
                               limit=limit,
                               sort=sort,
                               search=search,
                               select=full_select,
                               count=True,
                               get_data=True,
                               query=f"policy_id={policy_id}"
                               if q == "" else f"policy_id={policy_id};{q}",
                               filters=filters,
                               default_query=default_query_sca_check,
                               default_sort_field='policy_id',
                               fields=fields_translation,
                               count_field='id')

    result_dict = db_query.run()

    if 'items' in result_dict:
        checks = result_dict['items']
    else:
        raise WazuhException(2007)

    groups = groupby(checks, key=itemgetter('id'))
    result = []
    select_fields = full_select['fields'] if select is None else select[
        'fields']
    select_fields = set([
        field if field != 'compliance' else 'compliance'
        for field in select_fields if field in fields_translation_sca_check
    ])
    # Rearrange check and compliance fields
    for _, group in groups:
        group_list = list(group)
        check_dict = {
            k: v
            for k, v in group_list[0].items() if k in select_fields
        }
        for extra_field, field_translations in [
            ('compliance', fields_translation_sca_check_compliance),
            ('rules', fields_translation_sca_check_rule)
        ]:
            if (select is None or extra_field in select['fields']) and set(
                    field_translations.keys()) & group_list[0].keys():
                check_dict[extra_field] = [
                    dict(zip(field_translations.values(), x))
                    for x in set((map(itemgetter(
                        *field_translations.keys()), group_list)))
                ]

        result.append(check_dict)

    return {'totalItems': result_dict['totalItems'], 'items': result}
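
The interesting part is the regrouping at the end: the query returns one row per check/compliance/rule combination, and consecutive rows sharing an id are folded into a single check with nested lists. A reduced sketch of that regrouping with invented field names and values:

from itertools import groupby
from operator import itemgetter

rows = [{'id': 1, 'title': 'Ensure SSH is configured', 'key': 'cis', 'value': '5.2'},
        {'id': 1, 'title': 'Ensure SSH is configured', 'key': 'pci', 'value': '2.2'},
        {'id': 2, 'title': 'Ensure firewall is active', 'key': 'cis', 'value': '3.5'}]

result = []
for _, group in groupby(rows, key=itemgetter('id')):
    group_list = list(group)
    check_dict = {k: v for k, v in group_list[0].items() if k in {'id', 'title'}}
    check_dict['compliance'] = [dict(zip(('key', 'value'), x))
                                for x in set(map(itemgetter('key', 'value'), group_list))]
    result.append(check_dict)
print(result)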
Example #12
def get_agent_group(group_id,
                    offset=0,
                    limit=common.database_limit,
                    sort=None,
                    search=None,
                    select=None):
    """
    Gets the agents in a group

    :param group_id: Group ID.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """

    # Connect DB
    db_global = glob(common.database_path_global)
    if not db_global:
        raise WazuhException(1600)

    conn = Connection(db_global[0])
    valid_select_fiels = {
        "id", "name", "ip", "last_keepalive", "os_name", "os_version",
        "os_platform", "os_uname", "version", "config_sum", "merged_sum",
        "manager_host", "status"
    }
    # fields like status need to retrieve others to be properly computed.
    dependent_select_fields = {'status': {'last_keepalive', 'version'}}
    search_fields = {"id", "name", "os_name"}

    # Init query
    query = "SELECT {0} FROM agent WHERE `group` = :group_id"
    fields = {'id': 'id', 'name': 'name'}  # field: db_column
    request = {'group_id': group_id}

    # Select
    if select:
        select_fields_param = set(select['fields'])

        if not select_fields_param.issubset(valid_select_fiels):
            uncorrect_fields = select_fields_param - valid_select_fiels
            raise WazuhException(1724, "Allowed select fields: {0}. Fields {1}".\
                    format(', '.join(list(valid_select_fiels)), ', '.join(uncorrect_fields)))

        select_fields = select_fields_param
    else:
        select_fields = valid_select_fiels

    # add dependent select fields to the database select query
    db_select_fields = set()
    for dependent, fields in dependent_select_fields.items():
        if dependent in select_fields:
            db_select_fields |= fields
    db_select_fields |= (select_fields - set(dependent_select_fields.keys()))

    # Search
    if search:
        query += " AND NOT" if bool(search['negation']) else ' AND'
        query += " (" + " OR ".join(x + ' LIKE :search'
                                    for x in search_fields) + " )"
        request['search'] = '%{0}%'.format(
            int(search['value']) if search['value'].isdigit(
            ) else search['value'])

    # Count
    conn.execute(query.format('COUNT(*)'), request)
    data = {'totalItems': conn.fetch()[0]}

    # Sorting
    if sort:
        if sort['fields']:
            allowed_sort_fields = db_select_fields
            # Check if every element in sort['fields'] is in allowed_sort_fields.
            if not set(sort['fields']).issubset(allowed_sort_fields):
                raise WazuhException(1403, 'Allowed sort fields: {0}. Fields: {1}'.\
                    format(allowed_sort_fields, sort['fields']))

            order_str_fields = [
                '{0} {1}'.format(fields[i], sort['order'])
                for i in sort['fields']
            ]
            query += ' ORDER BY ' + ','.join(order_str_fields)
        else:
            query += ' ORDER BY id {0}'.format(sort['order'])
    else:
        query += ' ORDER BY id ASC'

    # OFFSET - LIMIT
    if limit:
        query += ' LIMIT :offset,:limit'
        request['offset'] = offset
        request['limit'] = limit

    # Data query
    conn.execute(query.format(','.join(db_select_fields)), request)

    non_nested = [{field:tuple_elem for field,tuple_elem \
            in zip(db_select_fields, tuple) if tuple_elem} for tuple in conn]

    if 'id' in select_fields:
        # apply the zero-padding with an explicit loop: a bare map() call is
        # lazy under Python 3 and would never run
        for x in non_nested:
            x['id'] = str(x['id']).zfill(3)

    if 'status' in select_fields:
        try:
            for x in non_nested:
                x['status'] = Agent.calculate_status(x['last_keepalive'],
                                                     x['version'] is None)
        except KeyError:
            pass

    # return only the fields requested by the user (saved in select_fields) and not the dependent ones
    non_nested = [{k: v
                   for k, v in d.items() if k in select_fields}
                  for d in non_nested]

    data['items'] = [plain_dict_to_nested_dict(d, ['os']) for d in non_nested]

    return data
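
Each row from the cursor is zipped with the selected column names, empty values are dropped, and agent ids are padded to three digits. A small sketch of that step with invented values:

db_select_fields = ['id', 'name', 'os_name']
row = (1, 'agent-01', None)

non_nested = {field: value for field, value in zip(db_select_fields, row) if value}
non_nested['id'] = str(non_nested['id']).zfill(3)
print(non_nested)   # {'id': '001', 'name': 'agent-01'}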
Example #13
def get_all_groups(offset=0,
                   limit=common.database_limit,
                   sort=None,
                   search=None,
                   hash_algorithm='md5'):
    """
    Gets the existing groups.

    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    def get_hash(file, hash_algorithm='md5'):
        filename = "{0}/{1}".format(common.shared_path, file)

        # check hash algorithm
        try:
            algorithm_list = hashlib.algorithms_available
        except Exception as e:
            algorithm_list = hashlib.algorithms

        if hash_algorithm not in algorithm_list:
            raise WazuhException(
                1723, "Available algorithms are {0}.".format(algorithm_list))

        hashing = hashlib.new(hash_algorithm)

        try:
            with open(filename, 'rb') as f:
                hashing.update(f.read())
        except IOError:
            return None

        return hashing.hexdigest()

    # Connect DB
    db_global = glob(common.database_path_global)
    if not db_global:
        raise WazuhException(1600)

    conn = Connection(db_global[0])
    query = "SELECT {0} FROM agent WHERE `group` = :group_id"

    # Group names
    data = []
    for entry in listdir(common.shared_path):
        full_entry = path.join(common.shared_path, entry)
        if not path.isdir(full_entry):
            continue

        # Group count
        request = {'group_id': entry}
        conn.execute(query.format('COUNT(*)'), request)

        # merged.mg and agent.conf sum
        merged_sum = get_hash(entry + "/merged.mg")
        conf_sum = get_hash(entry + "/agent.conf")

        item = {'count': conn.fetch()[0], 'name': entry}

        if merged_sum:
            item['merged_sum'] = merged_sum

        if conf_sum:
            item['conf_sum'] = conf_sum

        data.append(item)

    if search:
        data = search_array(data,
                            search['value'],
                            search['negation'],
                            fields=['name'])

    if sort:
        data = sort_array(data, sort['fields'], sort['order'])
    else:
        data = sort_array(data, ['name'])

    return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
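
The per-group checksums come from the nested get_hash() helper. A self-contained sketch of the same hashing, writing a temporary file instead of reading a group's merged.mg:

import hashlib
import tempfile

with tempfile.NamedTemporaryFile(delete=False, suffix='.mg') as f:
    f.write(b"example shared configuration")
    filename = f.name

hashing = hashlib.new('md5')
with open(filename, 'rb') as f:
    hashing.update(f.read())
print(hashing.hexdigest())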
Example #14
    def remove_bulk_agents(agent_ids_list, logger):
        """
        Removes files created by agents in worker nodes. This function doesn't remove agents from client.keys since the
        client.keys file is overwritten by the master node.
        :param agent_ids_list: List of agents ids to remove.
        :return: None.
        """
        def remove_agent_file_type(agent_files):
            for filetype in agent_files:

                filetype_glob = filetype.format(ossec_path=common.ossec_path,
                                                id='*',
                                                name='*',
                                                ip='*')
                filetype_agent = {
                    filetype.format(ossec_path=common.ossec_path,
                                    id=a['id'],
                                    name=a['name'],
                                    ip=a['ip'])
                    for a in agent_info
                }

                for agent_file in set(
                        glob.iglob(filetype_glob)) & filetype_agent:
                    logger.debug2("Removing {}".format(agent_file))
                    if os.path.isdir(agent_file):
                        shutil.rmtree(agent_file)
                    else:
                        os.remove(agent_file)

        if not agent_ids_list:
            return  # the function doesn't make sense if there are no agents to remove

        logger.info("Removing files from {} agents".format(
            len(agent_ids_list)))
        logger.debug("Agents to remove: {}".format(', '.join(agent_ids_list)))
        # the agents must be removed in groups of 997: 999 is the limit of SQL variables per query. Limit and offset are
        # always included in the SQL query, so that leaves 997 variables as limit.
        for agents_ids_sublist in itertools.zip_longest(*itertools.repeat(
                iter(agent_ids_list), 997),
                                                        fillvalue='0'):
            agents_ids_sublist = list(
                filter(lambda x: x != '0', agents_ids_sublist))
            # Get info from DB
            agent_info = Agent.get_agents_overview(
                q=",".join(["id={}".format(i) for i in agents_ids_sublist]),
                select={'fields': ['ip', 'id', 'name']},
                limit=None)['items']
            logger.debug2("Removing files from agents {}".format(
                ', '.join(agents_ids_sublist)))

            files_to_remove = [
                '{ossec_path}/queue/agent-info/{name}-{ip}',
                '{ossec_path}/queue/rootcheck/({name}) {ip}->rootcheck',
                '{ossec_path}/queue/diff/{name}',
                '{ossec_path}/queue/agent-groups/{id}',
                '{ossec_path}/queue/rids/{id}',
                '{ossec_path}/var/db/agents/{name}-{id}.db'
            ]
            remove_agent_file_type(files_to_remove)

            logger.debug2("Removing agent group assignments from database")
            # remove agent from groups
            db_global = glob.glob(common.database_path_global)
            if not db_global:
                raise WazuhException(1600)

            conn = Connection(db_global[0])
            agent_ids_db = {
                'id_agent{}'.format(i): int(i)
                for i in agents_ids_sublist
            }
            conn.execute(
                'delete from belongs where {}'.format(' or '.join([
                    'id_agent = :{}'.format(i) for i in agent_ids_db.keys()
                ])), agent_ids_db)
            conn.commit()

            # Tell wazuhbd to delete agent database
            wdb_conn = WazuhDBConnection()
            wdb_conn.delete_agents_db(agents_ids_sublist)

        logger.info("Agent files removed")
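
The loop over agents relies on a chunking idiom: zip_longest over several references to the same iterator yields fixed-size groups of ids, padded with '0'. A sketch with a chunk size of 3 instead of 997:

import itertools

agent_ids_list = [str(i).zfill(3) for i in range(1, 8)]   # '001' .. '007'

for agents_ids_sublist in itertools.zip_longest(*itertools.repeat(iter(agent_ids_list), 3),
                                                fillvalue='0'):
    agents_ids_sublist = list(filter(lambda x: x != '0', agents_ids_sublist))
    print(agents_ids_sublist)
# ['001', '002', '003'], then ['004', '005', '006'], then ['007']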
Example #15
    def __ge__(self, other):
        if isinstance(other, Rule):
            return self.id >= other.id
        else:
            raise WazuhException(1204)
Example #16
def read_config(config_file=common.ossec_conf):
    cluster_default_configuration = {
        'disabled': False,
        'node_type': 'master',
        'name': 'wazuh',
        'node_name': 'node01',
        'key': '',
        'port': 1516,
        'bind_addr': '0.0.0.0',
        'nodes': ['NODE_IP'],
        'hidden': 'no'
    }

    try:
        config_cluster = get_ossec_conf(section='cluster',
                                        conf_file=config_file)
    except WazuhException as e:
        if e.code == 1106:
            # if no cluster configuration is present in ossec.conf, return default configuration but disabling it.
            cluster_default_configuration['disabled'] = True
            return cluster_default_configuration
        else:
            raise WazuhException(3006, e.message)
    except Exception as e:
        raise WazuhException(3006, str(e))

    # if any value is missing from user's cluster configuration, add the default one:
    for value_name in set(cluster_default_configuration.keys()) - set(
            config_cluster.keys()):
        config_cluster[value_name] = cluster_default_configuration[value_name]

    if isinstance(config_cluster['port'],
                  str) and not config_cluster['port'].isdigit():
        raise WazuhException(3004, "Cluster port must be an integer.")

    config_cluster['port'] = int(config_cluster['port'])
    if config_cluster['disabled'] == 'no':
        config_cluster['disabled'] = False
    elif config_cluster['disabled'] == 'yes':
        config_cluster['disabled'] = True
    elif not isinstance(config_cluster['disabled'], bool):
        raise WazuhException(
            3004,
            "Allowed values for 'disabled' field are 'yes' and 'no'. Found: '{}'"
            .format(config_cluster['disabled']))

    # if config_cluster['node_name'].upper() == '$HOSTNAME':
    #     # The HOSTNAME environment variable is not always available in os.environ so use socket.gethostname() instead
    #     config_cluster['node_name'] = gethostname()

    # if config_cluster['node_name'].upper() == '$NODE_NAME':
    #     if 'NODE_NAME' in environ:
    #         config_cluster['node_name'] = environ['NODE_NAME']
    #     else:
    #         raise WazuhException(3006, 'Unable to get the $NODE_NAME environment variable')

    # if config_cluster['node_type'].upper() == '$NODE_TYPE':
    #     if 'NODE_TYPE' in environ:
    #         config_cluster['node_type'] = environ['NODE_TYPE']
    #     else:
    #         raise WazuhException(3006, 'Unable to get the $NODE_TYPE environment variable')

    if config_cluster['node_type'] == 'client':
        logger.info("Deprecated node type 'client'. Using 'worker' instead.")
        config_cluster['node_type'] = 'worker'

    return config_cluster
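
A condensed sketch of the two normalisation steps applied to the parsed section: keys missing from the user's configuration are filled from the defaults, and the port is validated and cast to an integer (the keys and values shown are illustrative):

defaults = {'disabled': False, 'node_type': 'master', 'port': 1516}
user_config = {'node_type': 'worker', 'port': '1517'}

for value_name in set(defaults.keys()) - set(user_config.keys()):
    user_config[value_name] = defaults[value_name]

if isinstance(user_config['port'], str) and not user_config['port'].isdigit():
    raise ValueError("Cluster port must be an integer.")
user_config['port'] = int(user_config['port'])
print(user_config)   # {'node_type': 'worker', 'port': 1517, 'disabled': False}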
Example #17
def main():
    # Check arguments
    if args.list_outdated:
        list_outdated()
        exit(0)

    if not args.agent:
        arg_parser.print_help()
        exit(0)

    if args.silent:
        args.debug = False

    # Capture Ctrl + C
    signal(SIGINT, signal_handler)

    # Initialize framework
    myWazuh = Wazuh(get_init=True)

    agent = Agent(id=args.agent)
    agent._load_info_from_DB()

    agent_info = "{0}/queue/agent-info/{1}-{2}".format(common.ossec_path, agent.name, agent.ip)
    if not os.path.isfile(agent_info):
        raise WazuhException(1720)

    # Custom WPK file
    if args.file:
        if args.execute:
            upgrade_command_result = agent.upgrade_custom(file_path=args.file, installer=args.execute, debug=args.debug, show_progress=print_progress if not args.silent else None, chunk_size=args.chunk_size, rl_timeout=-1 if args.timeout == None else args.timeout)
            if not args.silent:
                if not args.debug:
                    print("\n{0}... Please wait.".format(upgrade_command_result))
                else:
                    print(upgrade_command_result)

            counter = 0
            agent_info_stat = os.stat(agent_info).st_mtime

            sleep(10)
            while agent_info_stat == os.stat(agent_info).st_mtime and counter < common.agent_info_retries:
                sleep(common.agent_info_sleep)
                counter = counter + 1

            if agent_info_stat == os.stat(agent_info).st_mtime:
                raise WazuhException(1716, "Timeout waiting for agent reconnection.")

            upgrade_result = agent.upgrade_result(debug=args.debug)
            if not args.silent:
                print(upgrade_result)
        else:
            print("Error: Need executable filename.")

    # WPK upgrade file
    else:
        prev_ver = agent.version
        upgrade_command_result = agent.upgrade(wpk_repo=args.repository, debug=args.debug, version=args.version, force=args.force, show_progress=print_progress if not args.silent else None, chunk_size=args.chunk_size, rl_timeout=-1 if args.timeout == None else args.timeout)
        if not args.silent:
            if not args.debug:
                print("\n{0}... Please wait.".format(upgrade_command_result))
            else:
                print(upgrade_command_result)

        counter = 0
        agent_info_stat = os.stat(agent_info).st_mtime

        while agent_info_stat == os.stat(agent_info).st_mtime and counter < common.agent_info_retries:
            sleep(common.agent_info_sleep)
            counter = counter + 1

        if agent_info_stat == os.stat(agent_info).st_mtime:
            raise WazuhException(1716, "Timeout waiting for agent reconnection.")

        sleep(10)
        upgrade_result = agent.upgrade_result(debug=args.debug)
        if not args.silent:
            if not args.debug:
                agent._load_info_from_DB()
                print("Agent upgraded: {0} -> {1}".format(prev_ver, agent.version))
            else:
                print(upgrade_result)
Example #18
def walk_dir(dirname,
             recursive,
             files,
             excluded_files,
             excluded_extensions,
             get_cluster_item_key,
             get_md5=True,
             whoami='master'):
    walk_files = {}

    try:
        entries = listdir(common.ossec_path + dirname)
    except OSError as e:
        raise WazuhException(3015, str(e))

    for entry in entries:
        if entry in excluded_files or reduce(
                add, map(lambda x: entry[-(len(x)):] == x,
                         excluded_extensions)):
            continue

        try:
            full_path = path.join(dirname, entry)
            if entry in files or files == ["all"]:

                if not path.isdir(common.ossec_path + full_path):
                    file_mod_time = datetime.utcfromtimestamp(
                        stat(common.ossec_path + full_path).st_mtime)

                    if whoami == 'worker' and file_mod_time < (
                            datetime.utcnow() - timedelta(minutes=30)):
                        continue

                    entry_metadata = {
                        "mod_time": str(file_mod_time),
                        'cluster_item_key': get_cluster_item_key
                    }
                    if '.merged' in entry:
                        entry_metadata['merged'] = True
                        entry_metadata[
                            'merge_type'] = 'agent-info' if 'agent-info' in entry else 'agent-groups'
                        entry_metadata['merge_name'] = dirname + '/' + entry
                    else:
                        entry_metadata['merged'] = False

                    if get_md5:
                        entry_metadata['md5'] = md5(common.ossec_path +
                                                    full_path)

                    walk_files[full_path] = entry_metadata

            if recursive and path.isdir(common.ossec_path + full_path):
                walk_files.update(
                    walk_dir(full_path, recursive, files, excluded_files,
                             excluded_extensions, get_cluster_item_key,
                             get_md5, whoami))

        except Exception as e:
            logger.error("Could not get checksum of file {}: {}".format(
                entry, e))

    return walk_files
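
The exclusion test uses reduce(add, map(...)) over booleans, which is equivalent to an any() over endswith() checks. A small sketch of both forms with illustrative extensions:

from functools import reduce
from operator import add

excluded_extensions = ['~', '.tmp', '.lock', '.swp']
entry = 'merged.mg.tmp'

clunky = reduce(add, map(lambda x: entry[-(len(x)):] == x, excluded_extensions))
simple = any(entry.endswith(x) for x in excluded_extensions)
print(bool(clunky), simple)   # True True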
Example #19
def upload_xml(xml_file, path):
    """
    Updates XML files (rules and decoders)
    :param xml_file: content of the XML file
    :param path: Destination of the new XML file
    :return: Confirmation message
    """
    # path of temporary files for parsing xml input
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.xml'.format(
        common.ossec_path, time.time(), random.randint(0, 1000))

    # create temporary file for parsing xml input
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            # beautify xml file
            xml = parseString('<root>' + xml_file + '</root>')
            # remove first line (XML declaration: <?xml version="1.0" ?>), <root> and </root> tags, and empty lines
            indent = '  '  # indent parameter for toprettyxml function
            pretty_xml = '\n'.join(
                filter(
                    lambda x: x.strip(),
                    xml.toprettyxml(indent=indent).split('\n')[2:-2])) + '\n'
            # revert xml.dom replacements
            # (https://github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305)
            pretty_xml = pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace("&quot;", "\"", ) \
                .replace("&gt;", ">").replace('&apos;', "'")
            # delete two first spaces of each line
            final_xml = re.sub(fr'^{indent}',
                               '',
                               pretty_xml,
                               flags=re.MULTILINE)
            tmp_file.write(final_xml)
        chmod(tmp_file_path, 0o640)
    except IOError:
        raise WazuhException(1005)
    except ExpatError:
        raise WazuhException(1113)
    except Exception as e:
        raise WazuhException(1000, str(e))

    try:
        # check xml format
        try:
            load_wazuh_xml(tmp_file_path)
        except Exception as e:
            raise WazuhException(1113, str(e))

        # move temporary file to group folder
        try:
            new_conf_path = join(common.ossec_path, path)
            move(tmp_file_path, new_conf_path, copy_function=copyfile)
        except Error:
            raise WazuhException(1016)
        except Exception:
            raise WazuhException(1000)

        return 'File updated successfully'

    except Exception as e:
        # remove created temporary file if an exception happens
        remove(tmp_file_path)
        raise e
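
The prettifying step can be tried on its own. A reduced sketch that skips the temporary-file and permission handling; the rule snippet is invented:

import re
from xml.dom.minidom import parseString

xml_file = "<rule id='100001' level='5'><description>test</description></rule>"

xml = parseString('<root>' + xml_file + '</root>')
indent = '  '
# drop the XML declaration, the <root>/</root> wrapper and empty lines
pretty_xml = '\n'.join(filter(lambda x: x.strip(),
                              xml.toprettyxml(indent=indent).split('\n')[2:-2])) + '\n'
# remove the indentation level introduced by the <root> wrapper
final_xml = re.sub(r'^{}'.format(indent), '', pretty_xml, flags=re.MULTILINE)
print(final_xml)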
Example #20
def _update_file(file_path,
                 new_content,
                 umask_int=None,
                 mtime=None,
                 w_mode=None,
                 tmp_dir='/queue/cluster',
                 whoami='master',
                 agents=None):

    dst_path = common.ossec_path + file_path
    if path.basename(dst_path) == 'client.keys':
        if whoami == 'worker':
            _check_removed_agents(new_content.split('\n'))
        else:
            logger.warning(
                "[Cluster] Client.keys file received in a master node.")
            raise WazuhException(3007)

    is_agent_info = 'agent-info' in dst_path
    is_agent_group = 'agent-groups' in dst_path
    if is_agent_info or is_agent_group:
        if whoami == 'master':
            agent_names, agent_ids = agents

            if is_agent_info:
                agent_name_re = re.match(r'(^.+)-(.+)$',
                                         path.basename(file_path))
                agent_name = agent_name_re.group(
                    1) if agent_name_re else path.basename(file_path)
                if agent_name not in agent_names:
                    raise WazuhException(3010, agent_name)
            elif is_agent_group:
                agent_id = path.basename(file_path)
                if agent_id not in agent_ids:
                    raise WazuhException(3010, agent_id)

            try:
                mtime = datetime.strptime(mtime, '%Y-%m-%d %H:%M:%S.%f')
            except ValueError:
                mtime = datetime.strptime(mtime, '%Y-%m-%d %H:%M:%S')

            if path.isfile(dst_path):

                local_mtime = datetime.utcfromtimestamp(
                    int(stat(dst_path).st_mtime))
                # check if the date is older than the manager's date
                if local_mtime > mtime:
                    logger.debug2(
                        "[Cluster] Receiving an old file ({})".format(
                            dst_path))  # debug2
                    return
        elif is_agent_info:
            logger.warning("[Cluster] Agent-info received in a worker node.")
            raise WazuhException(3011)

    # Write
    if w_mode == "atomic":
        f_temp = "{}{}{}.cluster.tmp".format(common.ossec_path, tmp_dir,
                                             file_path)
    else:
        f_temp = '{0}'.format(dst_path)

    if umask_int:
        oldumask = umask(umask_int)

    try:
        dest_file = open(f_temp, "w")
    except IOError as e:
        if e.errno == errno.ENOENT:
            dirpath = path.dirname(f_temp)
            mkdir_with_mode(dirpath)
            chmod(dirpath, S_IRWXU | S_IRWXG)
            dest_file = open(f_temp, "w")
        else:
            raise e

    dest_file.write(new_content)

    if umask_int:
        umask(oldumask)

    dest_file.close()

    if mtime:
        mtime_epoch = timegm(mtime.timetuple())
        utime(f_temp, (mtime_epoch, mtime_epoch))  # (atime, mtime)

    # Atomic
    if w_mode == "atomic":
        dirpath = path.dirname(dst_path)
        if not os.path.exists(dirpath):
            mkdir_with_mode(dirpath)
            chmod(path.dirname(dst_path), S_IRWXU | S_IRWXG)
        chown(f_temp, common.ossec_uid, common.ossec_gid)
        rename(f_temp, dst_path)
Example #21
def last_scan(agent_id):
    """
    Gets the last scan of the agent.

    :param agent_id: Agent ID.
    :return: Dictionary: end, start.
    """
    my_agent = Agent(agent_id)
    # if agent status is never connected, a KeyError happens
    try:
        agent_version = my_agent.get_basic_information(
            select={'fields': ['version']})['version']
    except KeyError:
        # if the agent is never connected, it won't have either version (key error) or last scan information.
        return {'start': 'ND', 'end': 'ND'}

    if WazuhVersion(agent_version) < WazuhVersion('Wazuh v3.7.0'):
        db_agent = glob('{0}/{1}-*.db'.format(common.database_path_agents,
                                              agent_id))
        if not db_agent:
            raise WazuhException(1600)
        else:
            db_agent = db_agent[0]
        conn = Connection(db_agent)

        data = {}
        # end time
        query = "SELECT max(date_last) FROM pm_event WHERE log = 'Ending rootcheck scan.'"
        conn.execute(query)
        for tuple in conn:
            data['end'] = tuple['max(date_last)'] if tuple[
                'max(date_last)'] is not None else "ND"

        # start time
        query = "SELECT max(date_last) FROM pm_event WHERE log = 'Starting rootcheck scan.'"
        conn.execute(query)
        for tuple in conn:
            data['start'] = tuple['max(date_last)'] if tuple[
                'max(date_last)'] is not None else "ND"

        return data
    else:
        fim_scan_info = WazuhDBQuerySyscheck(
            agent_id=agent_id,
            query='module=fim',
            offset=0,
            sort=None,
            search=None,
            limit=common.database_limit,
            select={
                'fields': ['end', 'start']
            },
            fields={
                'end': 'end_scan',
                'start': 'start_scan',
                'module': 'module'
            },
            table='scan_info',
            default_sort_field='start_scan').run()['items'][0]

        return fim_scan_info
Example #22
if __name__ == '__main__':
    # get arguments
    args = parser.parse_args()

    try:
        if args.debug:
            logging.getLogger('').setLevel(logging.DEBUG) #10

        # Initialize framework
        myWazuh = Wazuh(get_init=True)

        status = get_status_json()

        if status['enabled'] == 'no':
            raise WazuhException(3000, "The cluster is not enabled")
        elif status['running'] == 'no':
            raise WazuhException(3000, "The cluster is not running")

        if args.push:
            try:
                check_cluster_config(read_config())
            except WazuhException as e:
                raise e

            sync(debug=args.debug)

        elif args.manager is not None and args.files is None and args.force is None:
            logging.error("Invalid argument: -m parameter requires -f (--force) or -l (--files)")

        elif args.files is not None:
Example #23
    def get_ossec_init(self):
        """
        Gets information from /etc/ossec-init.conf.

        :return: ossec-init.conf as dictionary
        """

        try:
            with open(self.OSSEC_INIT, 'r') as f:
                line_regex = re.compile(r'(^\w+)="(.+)"')
                for line in f:
                    match = line_regex.match(line)
                    if match and len(match.groups()) == 2:
                        key = match.group(1).lower()
                        if key == "version":
                            self.version = match.group(2)
                        elif key == "directory":
                            # Read 'directory' when ossec_path (__init__) is set by default.
                            # It could mean that get_init is True and ossec_path is not used.
                            if self.path == '/var/ossec':
                                self.path = match.group(2)
                                common.set_paths_based_on_ossec(self.path)
                        elif key == "date":
                            self.installation_date = match.group(2)
                        elif key == "type":
                            if (str(match.group(2)) == "server"):
                                self.type = "manager"
                            else:
                                self.type = match.group(2)
        except:
            raise WazuhException(1005, self.OSSEC_INIT)

        # info DB if possible
        try:
            conn = Connection(common.database_path_global)

            query = "SELECT * FROM info"
            conn.execute(query)

            for tuple in conn:
                if tuple[0] == 'max_agents':
                    self.max_agents = tuple[1]
                elif tuple[0] == 'openssl_support':
                    self.openssl_support = tuple[1]
        except:
            self.max_agents = "N/A"
            self.openssl_support = "N/A"

        # Ruleset version
        ruleset_version_file = "{0}/ruleset/VERSION".format(self.path)
        try:
            with open(ruleset_version_file, 'r') as f:
                line_regex = re.compile(r'(^\w+)="(.+)"')
                for line in f:
                    match = line_regex.match(line)
                    if match and len(match.groups()) == 2:
                        self.ruleset_version = match.group(2)
        except:
            raise WazuhException(1005, ruleset_version_file)

        # Timezone info
        try:
            self.tz_offset = strftime("%z")
            self.tz_name = strftime("%Z")
        except:
            self.tz_offset = None
            self.tz_name = None

        return self.to_dict()
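
A small sketch of the KEY="value" parsing used for both ossec-init.conf and the ruleset VERSION file, run on an inline sample instead of the real files:

import re

sample = 'DIRECTORY="/var/ossec"\nVERSION="v3.9.0"\nDATE="Wed Jun 19 2019"\nTYPE="server"\n'
line_regex = re.compile(r'(^\w+)="(.+)"')

parsed = {}
for line in sample.splitlines():
    match = line_regex.match(line)
    if match and len(match.groups()) == 2:
        parsed[match.group(1).lower()] = match.group(2)
print(parsed)   # {'directory': '/var/ossec', 'version': 'v3.9.0', ...}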
Example #24
    def get_rules_files(status=None, path=None, file=None, offset=0, limit=common.database_limit, sort=None, search=None):
        """
        Gets a list of the rule files.

        :param status: Filters by status: enabled, disabled, all.
        :param path: Filters by path.
        :param file: Filters by filename.
        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        data = []
        status = Rule.__check_status(status)

        # Rules configuration
        ruleset_conf = configuration.get_ossec_conf(section='ruleset')
        if not ruleset_conf:
            raise WazuhException(1200)

        tmp_data = []
        tags = ['rule_include', 'rule_exclude']
        exclude_filenames = []
        for tag in tags:
            if tag in ruleset_conf:
                item_status = Rule.S_DISABLED if tag == 'rule_exclude' else Rule.S_ENABLED

                if type(ruleset_conf[tag]) is list:
                    items = ruleset_conf[tag]
                else:
                    items = [ruleset_conf[tag]]

                for item in items:
                    item_name = os.path.basename(item)
                    full_dir = os.path.dirname(item)
                    item_dir = os.path.relpath(full_dir if full_dir else common.ruleset_rules_path,
                                               start=common.ossec_path)
                    if tag == 'rule_exclude':
                        exclude_filenames.append(item_name)
                    else:
                        tmp_data.append({'file': item_name, 'path': item_dir, 'status': item_status})

        tag = 'rule_dir'
        if tag in ruleset_conf:
            if type(ruleset_conf[tag]) is list:
                items = ruleset_conf[tag]
            else:
                items = [ruleset_conf[tag]]

            for item_dir in items:
                all_rules = "{0}/{1}/*.xml".format(common.ossec_path, item_dir)

                for item in glob(all_rules):
                    item_name = os.path.basename(item)
                    item_dir = os.path.relpath(os.path.dirname(item), start=common.ossec_path)
                    if item_name in exclude_filenames:
                        item_status = Rule.S_DISABLED
                    else:
                        item_status = Rule.S_ENABLED
                    tmp_data.append({'file': item_name, 'path': item_dir, 'status': item_status})

        data = list(tmp_data)
        for d in tmp_data:
            if status and status != 'all' and status != d['status']:
                data.remove(d)
                continue
            if path and path != d['path']:
                data.remove(d)
                continue
            if file and file != d['file']:
                data.remove(d)
                continue

        if search:
            data = search_array(data, search['value'], search['negation'])

        if sort:
            data = sort_array(data, sort['fields'], sort['order'])
        else:
            data = sort_array(data, ['file'], 'asc')

        return {'items': cut_array(data, offset, limit), 'totalItems': len(data)}
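
A hedged usage sketch of the method above, assuming it is exposed on the Rule class as the surrounding snippets suggest; the filter and search values are illustrative:

# List enabled rule files whose entries mention "syslog", sorted by file name.
result = Rule.get_rules_files(
    status='enabled',
    search={'value': 'syslog', 'negation': False},
    sort={'fields': ['file'], 'order': 'asc'},
    offset=0,
    limit=50
)
print(result['totalItems'])
for entry in result['items']:
    print(entry['status'], entry['path'], entry['file'])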
Ejemplo n.º 25
0
    def _add_select_to_query(self):
        if len(self.select['fields']) > 1:
            raise WazuhException(1410)

        WazuhDBQuery._add_select_to_query(self)
Ejemplo n.º 26
0
    def get_rules(offset=0, limit=common.database_limit, sort=None, search=None, filters={}, q=''):
        """
        Gets a list of rules.

        :param offset: First item to return.
        :param limit: Maximum number of items to return.
        :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
        :param search: Looks for items with the specified string.
        :param filters: Defines field filters required by the user. Format: {"field1":"value1", "field2":["value2","value3"]}.
            This filter is used for filtering by 'status', 'group', 'pci', 'gpg13', 'gdpr', 'hipaa', 'nist-800-53',
            'file', 'path', 'id' and 'level'.
        :param q: Defines query to filter.

        :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
        """
        # set default values to parameters
        status = filters.get('status', None)
        group = filters.get('group', None)
        pci = filters.get('pci', None)
        gpg13 = filters.get('gpg13', None)
        gdpr = filters.get('gdpr', None)
        hipaa = filters.get('hipaa', None)
        nist_800_53 = filters.get('nist-800-53', None)
        path = filters.get('path', None)
        file_ = filters.get('file', None)
        id_ = filters.get('id', None)
        level = filters.get('level', None)

        all_rules = []

        if level:
            levels = level.split('-')
            if len(levels) < 1 or len(levels) > 2:
                raise WazuhException(1203)

        for rule_file in Rule.get_rules_files(status=status, limit=None)['items']:
            all_rules.extend(Rule.__load_rules_from_file(rule_file['file'], rule_file['path'], rule_file['status']))

        rules = list(all_rules)
        for r in all_rules:
            if group and group not in r.groups:
                rules.remove(r)
                continue
            elif pci and pci not in r.pci:
                rules.remove(r)
                continue
            elif gpg13 and gpg13 not in r.gpg13:
                rules.remove(r)
                continue
            elif gdpr and gdpr not in r.gdpr:
                rules.remove(r)
                continue
            elif hipaa and hipaa not in r.hipaa:
                rules.remove(r)
                continue
            elif nist_800_53 and nist_800_53 not in r.nist_800_53:
                rules.remove(r)
                continue
            elif path and path != r.path:
                rules.remove(r)
                continue
            elif file_ and file_ != r.file:
                rules.remove(r)
                continue
            elif id_ and int(id_) != r.id:
                rules.remove(r)
                continue
            elif level:
                if len(levels) == 1:
                    if int(levels[0]) != r.level:
                        rules.remove(r)
                        continue
                elif not (int(levels[0]) <= r.level <= int(levels[1])):
                    rules.remove(r)
                    continue

        if search:
            rules = search_array(rules, search['value'], search['negation'])

        if q:
            # 'rules' holds Rule objects, so convert them to dictionaries before applying the query filter
            rules = filter_array_by_query(q, [rule.to_dict() for rule in rules])

        if sort:
            rules = sort_array(rules, sort['fields'], sort['order'], Rule.SORT_FIELDS)
        else:
            rules = sort_array(rules, ['id'], 'asc')

        return {'items': cut_array(rules, offset, limit), 'totalItems': len(rules)}
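
A hedged usage sketch of get_rules, again assuming a Rule class context; the filter values are illustrative, the sort field is assumed to be allowed by Rule.SORT_FIELDS, and the attributes printed are those set by the rule loader in a later snippet:

# Rules with level 7 to 10 in the 'authentication_failed' group, ordered by id.
result = Rule.get_rules(
    filters={'level': '7-10', 'group': 'authentication_failed'},
    sort={'fields': ['id'], 'order': 'asc'},
    limit=20
)
for rule in result['items']:
    print(rule.id, rule.level, rule.description)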
Ejemplo n.º 27
0
    def connect_to_db(self):
        if not glob.glob(self.db_path):
            raise WazuhException(1600)
        return Connection(self.db_path)
Ejemplo n.º 28
0
    def __load_rules_from_file(rule_file, rule_path, rule_status):
        try:
            rules = []

            root = load_wazuh_xml(os.path.join(common.ossec_path, rule_path, rule_file))

            for xml_group in list(root):
                if xml_group.tag.lower() == "group":
                    general_groups = xml_group.attrib['name'].split(',')
                    for xml_rule in list(xml_group):
                        # New rule
                        if xml_rule.tag.lower() == "rule":
                            groups = []
                            rule = Rule()
                            rule.file = rule_file
                            rule.path = rule_path
                            rule.id = int(xml_rule.attrib['id'])
                            rule.level = int(xml_rule.attrib['level'])
                            rule.status = rule_status

                            for k in xml_rule.attrib:
                                if k != 'id' and k != 'level':
                                    rule.details[k] = xml_rule.attrib[k]

                            for xml_rule_tags in list(xml_rule):
                                tag = xml_rule_tags.tag.lower()
                                value = xml_rule_tags.text
                                if value is None:
                                    value = ''
                                if tag == "group":
                                    groups.extend(value.split(","))
                                elif tag == "description":
                                    rule.description += value
                                elif tag == "field":
                                    rule.add_detail(xml_rule_tags.attrib['name'], value)
                                elif tag in ("list", "info"):
                                    list_detail = {'name': value}
                                    for attrib, attrib_value in xml_rule_tags.attrib.items():
                                        list_detail[attrib] = attrib_value
                                    rule.add_detail(tag, list_detail)
                                # show rule variables
                                elif tag in {'regex', 'match', 'user', 'id'} and value != '' and value[0] == "$":
                                    for variable in filter(lambda x: x.get('name') == value[1:], root.findall('var')):
                                        rule.add_detail(tag, variable.text)
                                else:
                                    rule.add_detail(tag, value)

                            # Set groups
                            groups.extend(general_groups)

                            pci_groups = []
                            gpg13_groups = []
                            gdpr_groups = []
                            hipaa_groups = []
                            nist_800_53_groups = []
                            ossec_groups = []
                            for g in groups:
                                if 'pci_dss_' in g:
                                    pci_groups.append(g.strip()[8:])
                                elif 'gpg13_' in g:
                                    gpg13_groups.append(g.strip()[6:])
                                elif 'gdpr_' in g:
                                    gdpr_groups.append(g.strip()[5:])
                                elif 'hipaa_' in g:
                                    hipaa_groups.append(g.strip()[6:])
                                elif 'nist_800_53_' in g:
                                    nist_800_53_groups.append(g.strip()[12:])
                                else:
                                    ossec_groups.append(g)

                            rule.set_pci(pci_groups)
                            rule.set_gpg13(gpg13_groups)
                            rule.set_gdpr(gdpr_groups)
                            rule.set_hipaa(hipaa_groups)
                            rule.set_nist_800_53(nist_800_53_groups)
                            rule.set_group(ossec_groups)

                            rules.append(rule)
        except Exception as e:
            raise WazuhException(1201, "{0}. Error: {1}".format(rule_file, str(e)))

        return rules
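
The group loop above classifies compliance tags by fixed string prefixes and strips them by slicing; here is a small standalone sketch of the same idea, with the prefix lengths computed via len() instead of hard-coded offsets (the helper name is illustrative):

COMPLIANCE_PREFIXES = ('pci_dss_', 'gpg13_', 'gdpr_', 'hipaa_', 'nist_800_53_')

def classify_groups(groups):
    # Split rule groups into compliance references (keyed by prefix) and plain groups.
    compliance = {prefix: [] for prefix in COMPLIANCE_PREFIXES}
    plain = []
    for g in groups:
        g = g.strip()
        for prefix in COMPLIANCE_PREFIXES:
            if g.startswith(prefix):
                compliance[prefix].append(g[len(prefix):])
                break
        else:
            plain.append(g)
    return compliance, plain

# classify_groups(['pci_dss_10.2.4', 'gdpr_IV_35.7.d', 'syslog'])
# -> compliance['pci_dss_'] == ['10.2.4'], compliance['gdpr_'] == ['IV_35.7.d'], plain == ['syslog']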
Ejemplo n.º 29
0
def sync_one_node(debug, node, force=False):
    """
    Synchronize files with a single cluster node.
    """
    synchronization_date = time()
    synchronization_duration = 0.0

    config_cluster = read_config()
    if not config_cluster:
        raise WazuhException(3000, "No config found")

    cluster_items = get_cluster_items()

    before = time()
    # Get own items status
    own_items = list_files_from_filesystem(config_cluster['node_type'],
                                           cluster_items)
    own_items_names = own_items.keys()

    cluster_socket = connect_to_db_socket()
    logging.debug("Connected to cluster database socket")

    if force:
        clear_file_status_one_node(node, cluster_socket)
    all_files = get_file_status_of_one_node(node, own_items_names,
                                            cluster_socket)

    after = time()
    synchronization_duration += after - before
    logging.debug("Time retrieving info from DB: {0}".format(after - before))

    before = time()
    result_queue = queue()
    push_updates_single_node(all_files, node, config_cluster, result_queue)

    after = time()
    synchronization_duration += after - before
    logging.debug("Time sending info: {0}".format(after - before))
    before = time()

    result = result_queue.get()
    update_node_db_after_sync(result, node, cluster_socket)
    after = time()
    synchronization_duration += after - before

    send_to_socket(cluster_socket, "clearlast")
    received = receive_data_from_db_socket(cluster_socket)
    send_to_socket(
        cluster_socket,
        "updatelast {0} {1}".format(synchronization_date,
                                    int(synchronization_duration)))
    received = receive_data_from_db_socket(cluster_socket)

    cluster_socket.close()
    logging.debug("Time updating DB: {0}".format(after - before))

    if debug:
        return result
    else:
        return {
            'updated': len(result['files']['updated']),
            'error_files': result['files']['error'],  # per-file errors, kept under a distinct key from the global 'error'
            'invalid': result['files']['invalid'],
            'error': result['error'],
            'reason': result['reason']
        }
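
sync_one_node accumulates per-phase timings with repeated before/after time() pairs; the following is a generic sketch of that pattern as a context manager (not part of the original code, names are illustrative):

import logging
import time
from contextlib import contextmanager

@contextmanager
def timed_phase(label, totals):
    # Accumulate the elapsed seconds of each phase under totals['duration'].
    start = time.time()
    try:
        yield
    finally:
        elapsed = time.time() - start
        totals['duration'] = totals.get('duration', 0.0) + elapsed
        logging.debug("Time {0}: {1}".format(label, elapsed))

# totals = {}
# with timed_phase("retrieving info from DB", totals):
#     pass  # e.g. query the cluster database here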
Ejemplo n.º 30
0
    def _connect(self):
        try:
            self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.s.connect(self.path)
        except:
            raise WazuhException(1013, self.path)
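
Finally, a hedged standalone sketch of the same UNIX-socket connection written as a function, with the failure path closing the socket; the timeout parameter and the example path are illustrative additions, not part of the snippet:

import socket

def connect_unix_socket(path, timeout=10):
    # Open a stream connection to a UNIX-domain socket, as _connect() does above.
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.settimeout(timeout)
    try:
        s.connect(path)
    except OSError:
        s.close()
        raise
    return s

# sock = connect_unix_socket('/var/ossec/queue/ossec/queue')  # example path only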