Esempio n. 1
0
def check_cluster_config(config):
    """Validate the cluster configuration block.

    Checks the key, the node type, the port range and the node list,
    raising WazuhError(3004) on the first invalid field found.
    """
    validator = InputValidator()
    reserved = {'localhost', 'NODE_IP', '0.0.0.0', '127.0.1.1'}

    key = config['key']
    if not key:
        raise WazuhError(3004, 'Unspecified key')

    if not (validator.check_name(key) and validator.check_length(key, 32, eq)):
        raise WazuhError(
            3004,
            'Key must be 32 characters long and only have alphanumeric characters'
        )

    if config['node_type'] not in ('master', 'worker'):
        raise WazuhError(
            3004,
            'Invalid node type {0}. Correct values are master and worker'.format(
                config['node_type']))

    if not 1024 < config['port'] < 65535:
        raise WazuhError(
            3004, "Port must be higher than 1024 and lower than 65535.")

    if len(config['nodes']) > 1:
        logger.warning(
            "Found more than one node in configuration. Only master node should be specified. Using {} as master."
            .format(config['nodes'][0]))

    # Any node name colliding with a reserved address is rejected.
    invalid_elements = list(reserved & set(config['nodes']))
    if invalid_elements:
        raise WazuhError(
            3004, "Invalid elements in node fields: {0}.".format(
                ', '.join(invalid_elements)))
Esempio n. 2
0
def compress_files(name, list_path, cluster_control_json=None):
    """Create a zip containing cluster_control.json plus the files in list_path.

    Returns the path of the created zip file.
    """
    zip_file_path = "{0}/queue/cluster/{1}/{1}-{2}-{3}.zip".format(
        common.ossec_path, name, time(),
        str(random())[2:])

    zip_dir = os.path.dirname(zip_file_path)
    if not os.path.exists(zip_dir):
        mkdir_with_mode(zip_dir)

    with zipfile.ZipFile(zip_file_path, 'x') as zf:
        # Add each requested file; a too-large archive aborts, any other
        # per-file failure is only logged.
        for relative_path in (list_path or []):
            try:
                zf.write(filename=common.ossec_path + relative_path,
                         arcname=relative_path)
            except zipfile.LargeZipFile as e:
                raise WazuhError(3001, str(e))
            except Exception as e:
                logger.error("[Cluster] {}".format(
                    str(WazuhException(3001, str(e)))))

        # The control json is mandatory; failing to store it is fatal.
        try:
            zf.writestr("cluster_control.json",
                        json.dumps(cluster_control_json))
        except Exception as e:
            raise WazuhError(3001, str(e))

    return zip_file_path
Esempio n. 3
0
 def decode_body(cls, dikt, unicode_error=None, attribute_error=None):
     """Decode a request body as UTF-8, mapping failures to Wazuh errors."""
     try:
         decoded = dikt.decode('utf-8')
     except UnicodeDecodeError:
         # Body is not valid UTF-8.
         raise_if_exc(WazuhError(unicode_error))
     except AttributeError:
         # Body is not a bytes-like object.
         raise_if_exc(WazuhError(attribute_error))
     return decoded
Esempio n. 4
0
def upload_xml(xml_file, path):
    """Upload XML files (rules and decoders).

    The content is pretty-printed through xml.dom.minidom into a temporary
    file, validated with load_wazuh_xml, and finally moved to its destination.

    :param xml_file: content of the XML file
    :param path: Destination of the new XML file, relative to the ossec path
    :return: Confirmation message
    :raises WazuhInternalError(1005): if the temporary file cannot be written
    :raises WazuhInternalError(1016): if the file cannot be moved into place
    :raises WazuhError(1113): if the content is not valid XML
    """
    # -- characters are not allowed in XML comments
    xml_file = replace_in_comments(xml_file, '--', '%wildcard%')

    # path of temporary files for parsing xml input
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.xml'.format(common.ossec_path, time.time(), random.randint(0, 1000))

    # create temporary file for parsing xml input
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            # beauty xml file
            # <root> wrapper makes multi-element input a single well-formed document
            xml = parseString('<root>' + xml_file + '</root>')
            # remove first line (XML specification: <? xmlversion="1.0" ?>), <root> and </root> tags, and empty lines
            indent = '  '  # indent parameter for toprettyxml function
            pretty_xml = '\n'.join(filter(lambda x: x.strip(), xml.toprettyxml(indent=indent).split('\n')[2:-2])) + '\n'
            # revert xml.dom replacings
            # (https://github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305)
            # NOTE: '&amp;' must be reverted first so '&lt;'/'&gt;' stay intact.
            pretty_xml = pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace("&quot;", "\"", ) \
                .replace("&gt;", ">").replace('&apos;', "'")
            # delete two first spaces of each line
            final_xml = re.sub(fr'^{indent}', '', pretty_xml, flags=re.MULTILINE)
            final_xml = replace_in_comments(final_xml, '%wildcard%', '--')
            tmp_file.write(final_xml)
        chmod(tmp_file_path, 0o660)
    except IOError:
        raise WazuhInternalError(1005)
    except ExpatError:
        raise WazuhError(1113)

    try:
        # check xml format
        try:
            load_wazuh_xml(tmp_file_path)
        except Exception as e:
            raise WazuhError(1113, str(e))

        # move temporary file to group folder
        try:
            new_conf_path = join(common.ossec_path, path)
            safe_move(tmp_file_path, new_conf_path, permissions=0o660)
        except Error:
            raise WazuhInternalError(1016)

        return WazuhResult({'message': 'File updated successfully'})

    except Exception as e:
        # remove created temporary file if an exception happens
        remove(tmp_file_path)
        raise e
Esempio n. 5
0
def check_cluster_config(config):
    """Check that the cluster configuration is valid.

    Validated conditions:
        - 'key' is present, 32 characters long, alphanumeric only.
        - 'node_type' is either 'master' or 'worker'.
        - 'port' lies strictly between 1024 and 65535.
        - Only one node is specified (otherwise a warning is logged).
        - No reserved IP is used in the node list.

    Parameters
    ----------
    config : dict
        Cluster configuration.

    Raises
    ------
    WazuhError
        If any of the conditions above does not hold.
    """
    input_validator = InputValidator()
    reserved_node_names = {'localhost', 'NODE_IP', '0.0.0.0', '127.0.1.1'}

    if len(config['key']) == 0:
        raise WazuhError(3004, 'Unspecified key')

    if not input_validator.check_name(config['key']) or \
            not input_validator.check_length(config['key'], 32, eq):
        raise WazuhError(
            3004,
            'Key must be 32 characters long and only have alphanumeric characters'
        )

    if config['node_type'] not in {'master', 'worker'}:
        raise WazuhError(
            3004,
            'Invalid node type {0}. Correct values are master and worker'.format(
                config['node_type']))

    if not 1024 < config['port'] < 65535:
        raise WazuhError(
            3004, "Port must be higher than 1024 and lower than 65535.")

    if len(config['nodes']) > 1:
        logger.warning(
            "Found more than one node in configuration. Only master node should be specified. Using {} as master."
            .format(config['nodes'][0]))

    invalid_elements = list(reserved_node_names & set(config['nodes']))
    if len(invalid_elements) != 0:
        raise WazuhError(
            3004, "Invalid elements in node fields: {0}.".format(
                ', '.join(invalid_elements)))
Esempio n. 6
0
def compress_files(name, list_path, cluster_control_json=None):
    """Compress the listed files together with cluster_control.json into a zip.

    Files that cannot be read are collected and reported through the
    cluster_control_json dictionary instead of aborting the whole zip.

    Parameters
    ----------
    name : str
        Name of the node to which the zip will be sent.
    list_path : list
        List of file paths to be zipped.
    cluster_control_json : dict
        KO files (path-metadata) to be zipped as a json.

    Returns
    -------
    str
        Path where the zip file has been saved.
    """
    failed_files = []
    zip_file_path = "{0}/queue/cluster/{1}/{1}-{2}-{3}.zip".format(
        common.ossec_path, name, time(), str(random())[2:])

    parent_dir = os.path.dirname(zip_file_path)
    if not os.path.exists(parent_dir):
        mkdir_with_mode(parent_dir)

    with zipfile.ZipFile(zip_file_path, 'x') as zf:
        # A too-large archive aborts; any other per-file failure is logged
        # and recorded so it can be flagged in the control json.
        for relative_path in (list_path or []):
            try:
                zf.write(filename=common.ossec_path + relative_path,
                         arcname=relative_path)
            except zipfile.LargeZipFile as e:
                raise WazuhError(3001, str(e))
            except Exception as e:
                logger.debug("[Cluster] {}".format(
                    str(WazuhException(3001, str(e)))))
                failed_files.append(relative_path)

        try:
            if cluster_control_json and failed_files:
                update_cluster_control_with_failed(failed_files,
                                                   cluster_control_json)
            zf.writestr("cluster_control.json",
                        json.dumps(cluster_control_json))
        except Exception as e:
            raise WazuhError(3001, str(e))

    return zip_file_path
Esempio n. 7
0
def update_api_conf(new_config):
    """Update the API.yaml file.

    The 'remote_commands' option cannot be changed through the framework:
    any incoming value is discarded and the value already present in the
    configuration file (if any) is preserved.

    Parameters
    ----------
    new_config : dict
        Dictionary with the new configuration.

    Raises
    ------
    WazuhInternalError
        1005 if the configuration file cannot be read or written.
    WazuhError
        1105 if no configuration was provided.
    """
    if not new_config:
        raise WazuhError(1105)

    # Discard any attempt to set 'remote_commands' through the framework.
    # (Replaces the previous "'x' in d.keys() and d.pop('x')" expression
    # statement with the idiomatic pop-with-default.)
    new_config.pop('remote_commands', None)
    try:
        with open(common.api_config_path, 'r') as f:
            # Keep the 'remote_commands' value already stored on disk, if any.
            previous_config = yaml.safe_load(f)
            if previous_config and 'remote_commands' in previous_config:
                new_config['remote_commands'] = previous_config['remote_commands']
        with open(common.api_config_path, 'w+') as f:
            yaml.dump(new_config, f)
    except IOError:
        raise WazuhInternalError(1005)
Esempio n. 8
0
def upload_list(list_file, path):
    """Update a CDB list.

    :param list_file: content of the list
    :param path: Destination of the new list file
    :return: Confirmation message.
    """
    # Temporary file used while the list is validated.
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.txt'.format(
        common.ossec_path, time.time(), random.randint(0, 1000))

    try:
        with open(tmp_file_path, 'w') as tmp_file:
            # Copy the list line by line, skipping empty lines.
            for line in list_file.splitlines():
                if line:
                    tmp_file.write(line.strip() + '\n')
        chmod(tmp_file_path, 0o640)
    except IOError:
        raise WazuhInternalError(1005)

    # The temporary file must be a valid CDB list before it replaces anything.
    if not validate_cdb_list(tmp_file_path):
        raise WazuhError(1800)

    # Move the validated temporary file to its final destination.
    try:
        safe_move(tmp_file_path, join(common.ossec_path, path),
                  permissions=0o660)
    except Error:
        raise WazuhInternalError(1016)

    return WazuhResult({'message': 'File updated successfully'})
Esempio n. 9
0
def test_model_from_dict():
    """Test class Model `from_dict` method."""
    # Passing an exception instead of a dict must re-raise it.
    error = WazuhError(1000)
    with pytest.raises(type(error)):
        bm.Model.from_dict(error)

    # A plain dict is deserialized through the regular model path.
    sample = {'test_key': 'test_value'}
    assert bm.Model.from_dict(sample) == deserialize_model(sample, bm.Model)
Esempio n. 10
0
def prettify_xml(xml_file):
    """Prettify XML files (rules, decoders and ossec.conf)

    Parameters
    ----------
    xml_file : str
        Content of the XML file

    Returns
    -------
    str
        Checked and pretty-printed XML content

    Raises
    ------
    WazuhError
        1113 if the content is not valid XML or remote-command checks fail.
    """
    # -- characters are not allowed in XML comments
    xml_file = replace_in_comments(xml_file, '--', '%wildcard%')

    # create temporary file for parsing xml input
    try:
        # beauty xml file
        # <root> wrapper makes multi-element input a single well-formed document
        xml = parseString('<root>' + xml_file + '</root>')
        # remove first line (XML specification: <? xmlversion="1.0" ?>), <root> and </root> tags, and empty lines
        indent = '  '  # indent parameter for toprettyxml function
        pretty_xml = '\n'.join(
            filter(lambda x: x.strip(),
                   xml.toprettyxml(indent=indent).split('\n')[2:-2])) + '\n'
        # revert xml.dom replacings
        # (https://github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305)
        # NOTE: '&amp;' must be reverted first so '&lt;'/'&gt;' stay intact.
        pretty_xml = pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace("&quot;", "\"", ) \
            .replace("&gt;", ">").replace('&apos;', "'")
        # delete two first spaces of each line
        final_xml = re.sub(fr'^{indent}', '', pretty_xml, flags=re.MULTILINE)
        final_xml = replace_in_comments(final_xml, '%wildcard%', '--')

        # Check if remote commands are allowed
        check_remote_commands(final_xml)
        # Check xml format
        load_wazuh_xml(xml_path='', data=final_xml)

        return final_xml
    except ExpatError:
        raise WazuhError(1113)
    except WazuhError as e:
        # Let Wazuh errors (e.g. from check_remote_commands) pass through
        # untouched instead of being re-wrapped by the generic handler below.
        raise e
    except Exception as e:
        raise WazuhError(1113, str(e))
Esempio n. 11
0
def test_DistributedAPI_local_request(mock_local_request):
    """Test `local_request` method from class DistributedAPI and check the behaviour when an error raise."""
    # Plain local request with a synchronous function.
    dapi_kwargs = {'f': manager.status, 'logger': logger}
    raise_if_exc_routine(dapi_kwargs=dapi_kwargs)

    # Request forwarding a local client argument to the called function.
    dapi_kwargs = {
        'f': cluster.get_nodes_info,
        'logger': logger,
        'local_client_arg': 'lc'
    }
    raise_if_exc_routine(dapi_kwargs=dapi_kwargs)

    # Same request executed as an asynchronous function.
    dapi_kwargs['is_async'] = True
    raise_if_exc_routine(dapi_kwargs=dapi_kwargs)

    # A timeout while awaiting must surface as a ProblemException carrying
    # the timeout message in its 'dapi_errors' extension.
    with patch('asyncio.wait_for',
               new=AsyncMock(side_effect=TimeoutError('Testing'))):
        dapi = DistributedAPI(f=manager.status, logger=logger)
        try:
            raise_if_exc(loop.run_until_complete(dapi.distribute_function()))
        except ProblemException as e:
            assert e.ext['dapi_errors'][list(e.ext['dapi_errors'].keys())[0]]['error'] == \
                   'Timeout executing API request'

    # A WazuhError is reported with its own code, in normal and debug mode.
    with patch('asyncio.wait_for',
               new=AsyncMock(side_effect=WazuhError(1001))):
        dapi_kwargs = {'f': manager.status, 'logger': logger}
        raise_if_exc_routine(dapi_kwargs=dapi_kwargs, expected_error=1001)

        dapi_kwargs['debug'] = True
        raise_if_exc_routine(dapi_kwargs=dapi_kwargs, expected_error=1001)

    # A WazuhInternalError keeps its code; in debug mode it propagates as-is.
    with patch('asyncio.wait_for',
               new=AsyncMock(side_effect=WazuhInternalError(1001))):
        dapi_kwargs = {'f': manager.status, 'logger': logger}
        raise_if_exc_routine(dapi_kwargs=dapi_kwargs, expected_error=1001)

        dapi = DistributedAPI(f=manager.status, logger=logger, debug=True)
        try:
            raise_if_exc(loop.run_until_complete(dapi.distribute_function()))
        except WazuhInternalError as e:
            assert e.code == 1001

    # An unexpected exception maps to the generic 1000 error, except in
    # debug mode, where the original exception propagates.
    with patch('asyncio.wait_for',
               new=AsyncMock(side_effect=KeyError('Testing'))):
        dapi_kwargs = {'f': manager.status, 'logger': logger}
        raise_if_exc_routine(dapi_kwargs=dapi_kwargs, expected_error=1000)

        dapi = DistributedAPI(f=manager.status, logger=logger, debug=True)
        try:
            raise_if_exc(loop.run_until_complete(dapi.distribute_function()))
        except Exception as e:
            assert type(e) == KeyError
Esempio n. 12
0
def end_logtest_session(token: str = None):
    """Close the logtest session associated with the given token.

    Parameters
    ----------
    token : str
        Logtest session token.

    Raises
    ------
    WazuhError
        7001 if no token was provided; 7000 if logtest reported an error.

    Returns
    -------
    dict
        Logtest response to the message.
    """
    if token is None:
        raise WazuhError(7001)

    response = send_logtest_msg(command='remove_session',
                                parameters={'token': token})
    if response['error'] != 0:
        error_detail = response.get('message', 'Could not parse error message')
        raise WazuhError(code=7000, extra_message=error_detail)

    return response
Esempio n. 13
0
async def test_get_system_nodes():
    """Verify that get_system_nodes function returns the name of all cluster nodes."""
    with patch('wazuh.core.cluster.local_client.LocalClient.execute',
               side_effect=async_local_client):
        expected_result = [{'items': [{'name': 'master'}]}]
        for expected in expected_result:
            with patch('wazuh.core.cluster.control.get_nodes',
                       return_value=expected):
                result = await control.get_system_nodes()
                assert result == [expected['items'][0]['name']]

        # When get_nodes fails with WazuhInternalError(3012), get_system_nodes
        # is expected to return WazuhError(3013) instead of propagating it.
        # (Removed the unused 'expected_exception' local.)
        with patch('wazuh.core.cluster.control.get_nodes',
                   side_effect=WazuhInternalError(3012)):
            result = await control.get_system_nodes()
            assert result == WazuhError(3013)
Esempio n. 14
0
def update_api_conf(new_config):
    """Write the given configuration to the API.yaml file.

    Parameters
    ----------
    new_config : dict
        Dictionary with the new configuration.

    Raises
    ------
    WazuhInternalError
        1005 if the file cannot be written.
    WazuhError
        1105 if no configuration was provided.
    """
    if not new_config:
        raise WazuhError(1105)
    try:
        with open(common.api_config_path, 'w+') as f:
            yaml.dump(new_config, f)
    except IOError:
        raise WazuhInternalError(1005)
Esempio n. 15
0
def test_read_cluster_config():
    """Verify that read_cluster function returns, in this case, the default configuration."""
    config = utils.read_cluster_config()
    assert config == default_cluster_config

    # A failure reading ossec.conf must be remapped to cluster error 3006.
    with patch('wazuh.core.cluster.utils.get_ossec_conf',
               side_effect=WazuhError(1001)):
        with pytest.raises(WazuhError, match='.* 3006 .*'):
            utils.read_cluster_config()

    # When called with from_import=True, parsing failures exit the process.
    with patch('wazuh.core.configuration.load_wazuh_xml',
               return_value=SystemExit):
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            utils.read_cluster_config(from_import=True)
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 0

    # A missing 'cluster' section (KeyError) is also remapped to 3006.
    with patch('wazuh.core.cluster.utils.get_ossec_conf',
               side_effect=KeyError(1)):
        with pytest.raises(WazuhError, match='.* 3006 .*'):
            utils.read_cluster_config()

    # NOTE: the steps below mutate the shared 'default_cluster_config' and
    # depend on running in this exact order.
    with patch('wazuh.core.cluster.utils.get_ossec_conf',
               return_value={'cluster': default_cluster_config}):
        utils.read_config.cache_clear()
        default_cluster_config.pop('hidden')
        default_cluster_config['disabled'] = 'no'
        config = utils.read_cluster_config()
        config_simple = utils.read_config()
        assert config == config_simple
        assert config == default_cluster_config

        default_cluster_config['node_type'] = 'client'
        config = utils.read_cluster_config()
        assert config == default_cluster_config

        # Invalid 'disabled' value -> configuration error 3004.
        default_cluster_config['disabled'] = 'None'
        with pytest.raises(WazuhError, match='.* 3004 .*'):
            utils.read_cluster_config()

        default_cluster_config['disabled'] = 'yes'
        config = utils.read_cluster_config()
        assert config == default_cluster_config

        # Invalid 'port' value -> configuration error 3004.
        default_cluster_config['port'] = 'None'
        with pytest.raises(WazuhError, match='.* 3004 .*'):
            utils.read_cluster_config()
Esempio n. 16
0
def update_security_conf(new_config):
    """Write the security configuration file and reset affected middleware state.

    Parameters
    ----------
    new_config : dict
        Dictionary with the new configuration.

    Raises
    ------
    WazuhInternalError
        1005 if the configuration file cannot be written.
    WazuhError
        4021 if no configuration was provided.
    """
    # Guard clause instead of trailing else so the middleware resets below
    # are unmistakably reached only with a non-empty configuration.
    if not new_config:
        raise WazuhError(4021)

    try:
        with open(SECURITY_CONFIG_PATH, 'w+') as f:
            yaml.dump(new_config, f)
    except IOError:
        raise WazuhInternalError(1005)

    # Reset the in-memory middleware counters tied to the updated options.
    if 'max_login_attempts' in new_config:
        middlewares.ip_stats = dict()
        middlewares.ip_block = set()
    if 'max_request_per_minute' in new_config:
        middlewares.request_counter = 0
Esempio n. 17
0
def parse_execd_output(output: str) -> Dict:
    """Extract a clean status/error structure from raw execd socket output.

    :param output: Raw output from execd
    :return: Cleaned log message in a dictionary structure
    """
    json_output = json.loads(output)
    if json_output['error'] == 0:
        return {'status': 'OK'}

    # Collect the message carried by every matching log line, dropping
    # duplicates while preserving their first-seen order.
    errors = []
    for line in json_output['message'].splitlines(keepends=False):
        match = _re_logtest.match(line)
        if match:
            errors.append(match.group(1))
    errors = list(OrderedDict.fromkeys(errors))
    raise WazuhError(1908, extra_message=', '.join(errors))
Esempio n. 18
0
def test_read_cluster_config():
    """Verify that read_cluster function returns, in this case, the default configuration."""
    config = utils.read_cluster_config()
    assert config == default_cluster_config

    # A failure reading ossec.conf must be remapped to cluster error 3006.
    with patch('wazuh.core.cluster.utils.get_ossec_conf',
               side_effect=WazuhError(1001)):
        with pytest.raises(WazuhError, match='.* 3006 .*'):
            utils.read_cluster_config()

    # A missing 'cluster' section (KeyError) is also remapped to 3006.
    with patch('wazuh.core.cluster.utils.get_ossec_conf',
               side_effect=KeyError(1)):
        with pytest.raises(WazuhError, match='.* 3006 .*'):
            utils.read_cluster_config()

    # NOTE: the steps below mutate the shared 'default_cluster_config' and
    # depend on running in this exact order.
    with patch('wazuh.core.cluster.utils.get_ossec_conf',
               return_value={'cluster': default_cluster_config}):
        default_cluster_config.pop('hidden')
        default_cluster_config['disabled'] = 'no'
        config = utils.read_cluster_config()
        config_simple = utils.read_config()
        assert config == config_simple
        assert config == default_cluster_config

        default_cluster_config['node_type'] = 'client'
        config = utils.read_cluster_config()
        assert config == default_cluster_config

        # Invalid 'disabled' value -> configuration error 3004.
        default_cluster_config['disabled'] = 'None'
        with pytest.raises(WazuhError, match='.* 3004 .*'):
            utils.read_cluster_config()

        default_cluster_config['disabled'] = 'yes'
        config = utils.read_cluster_config()
        assert config == default_cluster_config

        # Invalid 'port' value -> configuration error 3004.
        default_cluster_config['port'] = 'None'
        with pytest.raises(WazuhError, match='.* 3004 .*'):
            utils.read_cluster_config()
Esempio n. 19
0
    async def get_kwargs(cls, request, additional_kwargs: dict = None):
        """Deserialize a request body into the model's kwargs, rejecting unknown fields."""
        try:
            dikt = request if isinstance(request,
                                         dict) else await request.json()
            f_kwargs = util.deserialize_model(dikt, cls).to_dict()
        except JSONDecodeError as e:
            raise_if_exc(APIError(code=2005, details=e.msg))

        # Any incoming key that is not part of the model is rejected.
        invalid = {key for key in dikt if key not in f_kwargs}
        if invalid:
            raise_if_exc(
                WazuhError(
                    5005,
                    extra_message='Invalid field found {}'.format(invalid)))

        if additional_kwargs is not None:
            f_kwargs.update(additional_kwargs)

        return f_kwargs
Esempio n. 20
0
def update_api_conf(new_config):
    """Merge the new options into the current API configuration and persist it.

    Nested dictionaries are merged key by key so unspecified sub-options
    keep their previous values; keys unknown to the current configuration
    are ignored.

    Parameters
    ----------
    new_config : dict
        Dictionary with the new configuration.

    Raises
    ------
    WazuhInternalError
        1005 if the API.yaml file cannot be written.
    WazuhError
        1105 if no configuration was provided.
    """
    if not new_config:
        raise WazuhError(1105)

    for key, value in new_config.items():
        if key not in configuration.api_conf:
            continue
        current = configuration.api_conf[key]
        if isinstance(current, dict) and isinstance(value, dict):
            current.update(value)
        else:
            configuration.api_conf[key] = value

    try:
        with open(common.api_config_path, 'w+') as f:
            yaml.dump(configuration.api_conf, f)
    except IOError:
        raise WazuhInternalError(1005)
Esempio n. 21
0
def run_logtest(token=None, event=None, log_format=None, location=None):
    """Send a log event to the logtest socket and return its analysis.

    Parameters
    ----------
    token : str, optional
        Logtest session token. Default `None`
    event : str
        Log event.
    log_format : str
        Log format.
    location : str
        Log location.

    Raises
    ------
    WazuhError
        7000 if logtest replies with a non-zero error code.

    Returns
    -------
    dict
        Logtest response after analyzing the event.
    """
    # Snapshot the parameters as the message payload. This must run before
    # any other local is created so only the arguments are captured.
    parameters = locals()
    # Token is not required
    if parameters['token'] is None:
        del parameters['token']

    response = send_logtest_msg(command='log_processing',
                                parameters=parameters)
    if response['error'] != 0:
        raise WazuhError(code=7000,
                         extra_message=response.get(
                             'message', 'Could not parse error message'))

    return response
Esempio n. 22
0
def update_security_conf(new_config):
    """Update dict and write it in the configuration file.

    Parameters
    ----------
    new_config : dict
        Dictionary with the new configuration.

    Returns
    -------
    bool
        Whether issued tokens need to be revoked after the update.

    Raises
    ------
    WazuhInternalError
        1005 if the configuration file cannot be written.
    WazuhError
        4021 if no configuration was provided.
    """
    # Merge the incoming options into the in-memory configuration first.
    configuration.security_conf.update(new_config)

    need_revoke = False
    if new_config:
        # NOTE(review): security_conf was already updated above, so every key
        # of a non-empty new_config is necessarily present here and need_revoke
        # ends up True whenever new_config is non-empty — confirm whether this
        # check was meant to run against the pre-update keys instead.
        for key in new_config:
            if key in configuration.security_conf.keys():
                need_revoke = True
        try:
            with open(SECURITY_CONFIG_PATH, 'w+') as f:
                yaml.dump(configuration.security_conf, f)
        except IOError:
            raise WazuhInternalError(1005)
    else:
        raise WazuhError(4021)

    return need_revoke
Esempio n. 23
0
        else:
            assert result.failed_items == expected_result['failed_items']
        assert result.total_failed_items == expected_result['total_failed_items']
        wdb_close_mock.assert_called()


@pytest.mark.parametrize('agent_version, expected_version_errcode', [
    ('v3.12.0', 1760),
    ('Wazuh v4.2.0', 1760),
    (None, 1015)
])
@pytest.mark.parametrize('agent_list, expected_result, agent_info_list', [
    (['001'], test_result[3], ['001']),
])
@patch('wazuh.core.wdb.WazuhDBConnection.__init__', return_value=None)
@patch('wazuh.core.wdb.WazuhDBConnection.execute', side_effect=WazuhError(1000))
@patch('wazuh.core.wdb.WazuhDBConnection.close')
def test_syscheck_clear_exception(wdb_close_mock, execute_mock, wdb_init_mock, agent_list, expected_result,
                                  agent_info_list, agent_version, expected_version_errcode):
    """Test function `clear` from syscheck module.

    It will force an exception.

    Parameters
    ----------
    agent_list : list
        List of agent IDs.
    expected_result : list
        List of dicts with expected results for every test.
    agent_info_list : list
        List of agent IDs that `syscheck.get_agents_info` will return when mocked.
Esempio n. 24
0
            assert (isinstance(a, str) for a in args)
            assert (isinstance(k, str) for k in kwargs)
        assert isinstance(result, AffectedItemsWazuhResult)
        assert result.affected_items == expected_result['affected_items']
        assert result.total_affected_items == expected_result['total_affected_items']
        if result.failed_items:
            assert next(iter(result.failed_items.values())) == expected_result['failed_items']
        else:
            assert result.failed_items == expected_result['failed_items']
        assert result.total_failed_items == expected_result['total_failed_items']


@pytest.mark.parametrize('agent_list, status_list, expected_result', [
    (['001'], {'status': 'active'}, test_result[3])
])
@patch('wazuh.syscheck.OssecQueue', side_effect=WazuhError(1000))
def test_syscheck_run_exception(ossec_queue_mock, agent_list, status_list, expected_result):
    """Test function `run` from syscheck module.

    It will force an exception.

    Parameters
    ----------
    agent_list : list
        List of agent IDs.
    status_list : list
        List of agent statuses.
    expected_result : list
        List of dicts with expected results for every test.
    """
    with patch('wazuh.syscheck.Agent.get_basic_information', return_value=status_list):
Esempio n. 25
0
def walk_dir(dirname,
             recursive,
             files,
             excluded_files,
             excluded_extensions,
             get_cluster_item_key,
             get_md5=True,
             whoami='master'):
    """Walk a directory collecting metadata for the requested files.

    Parameters
    ----------
    dirname : str
        Directory (relative to the ossec path) to look into.
    recursive : bool
        Whether to descend into subdirectories.
    files : list
        Files to collect information from, or ["all"] for every file.
    excluded_files : list
        File names to skip.
    excluded_extensions : list
        File extensions to skip.
    get_cluster_item_key : str
        Key inside cluster.json['files'] each collected file belongs to.
    get_md5 : bool
        Whether to compute the MD5 hash of each collected file.
    whoami : str
        Node type performing the walk; workers skip files modified more
        than 30 minutes ago.

    Returns
    -------
    walk_files : dict
        Relative paths (keys) and metadata (values) of the files found.

    Raises
    ------
    WazuhError
        3015 if 'dirname' cannot be listed.
    """
    walk_files = {}

    try:
        entries = listdir(common.ossec_path + dirname)
    except OSError as e:
        raise WazuhError(3015, str(e))

    for entry in entries:
        # Skip excluded names and extensions. any()/endswith also handles an
        # empty 'excluded_extensions' list, which the previous
        # reduce(add, map(...)) version crashed on (reduce of empty sequence).
        if entry in excluded_files or any(
                entry.endswith(ext) for ext in excluded_extensions):
            continue

        try:
            full_path = path.join(dirname, entry)
            if entry in files or files == ["all"]:

                if not path.isdir(common.ossec_path + full_path):
                    file_mod_time = datetime.utcfromtimestamp(
                        stat(common.ossec_path + full_path).st_mtime)

                    # Workers only report recently modified files.
                    if whoami == 'worker' and file_mod_time < (
                            datetime.utcnow() - timedelta(minutes=30)):
                        continue

                    entry_metadata = {
                        "mod_time": str(file_mod_time),
                        'cluster_item_key': get_cluster_item_key
                    }
                    if '.merged' in entry:
                        entry_metadata['merged'] = True
                        entry_metadata[
                            'merge_type'] = 'agent-info' if 'agent-info' in entry else 'agent-groups'
                        entry_metadata['merge_name'] = dirname + '/' + entry
                    else:
                        entry_metadata['merged'] = False

                    if get_md5:
                        entry_metadata['md5'] = md5(common.ossec_path +
                                                    full_path)

                    walk_files[full_path] = entry_metadata

            if recursive and path.isdir(common.ossec_path + full_path):
                walk_files.update(
                    walk_dir(full_path, recursive, files, excluded_files,
                             excluded_extensions, get_cluster_item_key,
                             get_md5, whoami))

        except Exception as e:
            # A single unreadable entry must not abort the whole walk.
            logger.error("Could not get checksum of file {}: {}".format(
                entry, e))

    return walk_files
Esempio n. 26
0
def walk_dir(dirname,
             recursive,
             files,
             excluded_files,
             excluded_extensions,
             get_cluster_item_key,
             get_md5=True):
    """Iterate recursively inside a directory, save the path of each found file and obtain its metadata.

    Parameters
    ----------
    dirname : str
        Directory within which to look for files.
    recursive : bool
        Whether to recursively look for files inside found directories.
    files : list
        List of files to obtain information from. The special value ["all"] selects every file.
    excluded_files : list
        List of files to ignore.
    excluded_extensions : list
        List of filename suffixes (extensions) to ignore.
    get_cluster_item_key : str
        Key inside cluster.json['files'] to which each file belongs. This is useful to know what actions to take
        after sending a file from one node to another, depending on the directory the file belongs to.
    get_md5 : bool
        Whether to calculate and save the MD5 hash of the found file.

    Returns
    -------
    walk_files : dict
        Paths (keys) and metadata (values) of the requested files found inside 'dirname'.

    Raises
    ------
    WazuhError
        Error 3015 if 'dirname' cannot be listed.
    """
    walk_files = {}

    # Get list of all files and directories inside 'dirname'.
    try:
        entries = listdir(os.path.join(common.wazuh_path, dirname))
    except OSError as e:
        raise WazuhError(3015, str(e))

    # Get the information collected in the previous integration process.
    previous_status = common.cluster_integrity_mtime.get()

    for entry in entries:
        # If file is inside 'excluded_files' or file extension is inside 'excluded_extensions', skip over.
        if entry in excluded_files or any(
                entry.endswith(ext) for ext in excluded_extensions):
            continue

        try:
            # Relative path to listed file (used as the key of the resulting dict).
            full_path = path.join(dirname, entry)
            # Absolute path of the file on disk.
            current_path = os.path.join(common.wazuh_path, full_path)

            # If 'all' files have been requested or entry is in the specified files list.
            # NOTE(review): operator precedence makes this condition read as
            # `entry in files or (files == ["all"] and not isdir)`, so an entry
            # explicitly listed in 'files' is processed even when it is a
            # directory. Kept byte-identical to preserve behavior — confirm intent.
            if entry in files or files == ["all"
                                           ] and not path.isdir(current_path):
                file_mod_time = os.path.getmtime(current_path)

                try:
                    if file_mod_time == previous_status[full_path]['mod_time']:
                        # The current file has not changed its mtime since the last integrity process
                        walk_files[full_path] = previous_status[full_path]
                        continue
                except KeyError:
                    # No previous information for this file: compute fresh metadata.
                    pass

                # Create dict with metadata of 'full_path' file.
                entry_metadata = {
                    "mod_time": file_mod_time,
                    'cluster_item_key': get_cluster_item_key
                }
                if '.merged' in entry:
                    entry_metadata['merged'] = True
                    entry_metadata['merge_type'] = 'agent-groups'
                    entry_metadata['merge_name'] = os.path.join(dirname, entry)
                else:
                    entry_metadata['merged'] = False

                if get_md5:
                    entry_metadata['md5'] = md5(
                        os.path.join(common.wazuh_path, full_path))

                # Use the relative file path as a key to save its metadata dictionary.
                walk_files[full_path] = entry_metadata

            # Recurse into subdirectories when requested.
            if recursive and path.isdir(
                    os.path.join(common.wazuh_path, full_path)):
                walk_files.update(
                    walk_dir(full_path, recursive, files, excluded_files,
                             excluded_extensions, get_cluster_item_key,
                             get_md5))

        except Exception as e:
            # Best-effort: log the failure and continue with the remaining entries.
            logger.error(f"Could not get checksum of file {entry}: {e}")

    return walk_files
Esempio n. 27
0
    affected_item_1 = AffectedItemsWazuhResult(affected_items=deepcopy(agent_list_1))
    affected_item_2 = AffectedItemsWazuhResult(affected_items=deepcopy(agent_list_2))
    failed_item = get_wazuh_failed_item

    # Expect 'affected_items': ['001', '002', '003']
    or_result_1 = affected_item_1 | affected_item_2
    assert set(agent_list_1 + agent_list_2) == set(or_result_1.affected_items)
    assert not or_result_1.failed_items

    # Expect new failed_item
    or_result_2 = or_result_1 | failed_item
    assert or_result_2.failed_items == failed_item.failed_items


@pytest.mark.parametrize('or_item, expected_result', [
    (WazuhError(WAZUH_EXCEPTION_CODE, ids=['001']), AffectedItemsWazuhResult),
    (WazuhError(WAZUH_EXCEPTION_CODE), WazuhException),
    (WazuhException(WAZUH_EXCEPTION_CODE), WazuhException),
    ({'Invalid type': None}, None)
])
def test_results_AffectedItemsWazuhResult___or___exceptions(or_item, expected_result):
    """Test raised exceptions from method `__or__` from class `AffectedItemsWazuhResult`."""
    base_result = AffectedItemsWazuhResult()
    # Combining with an unsupported operand type is expected to raise a
    # WazuhException carrying code 1000; any other exception is a real failure.
    try:
        assert isinstance(base_result | or_item, expected_result)
    except WazuhException as exc:
        if exc.code != 1000:
            raise
Esempio n. 28
0
        assert result.total_affected_items == expected_result[
            'total_affected_items']
        if result.failed_items:
            assert next(iter(result.failed_items.values())
                        ) == expected_result['failed_items']
        else:
            assert result.failed_items == expected_result['failed_items']
        assert result.total_failed_items == expected_result[
            'total_failed_items']


@pytest.mark.parametrize('agent_list, status_list, expected_result',
                         [(['001'], {
                             'status': 'active'
                         }, test_result[3])])
# WazuhQueue is patched to raise WazuhError(1000) as soon as it is
# instantiated, forcing `run` down its error-handling path.
@patch('wazuh.syscheck.WazuhQueue', side_effect=WazuhError(1000))
def test_syscheck_run_exception(wazuh_queue_mock, agent_list, status_list,
                                expected_result):
    """Test function `run` from syscheck module.

    It will force an exception.

    Parameters
    ----------
    agent_list : list
        List of agent IDs.
    status_list : dict
        Agent status filter; the parametrized case passes {'status': 'active'}.
    expected_result : list
        List of dicts with expected results for every test.
    """
Esempio n. 29
0
            'total_affected_items']
        if result.failed_items:
            assert next(iter(result.failed_items.values())
                        ) == expected_result['failed_items']
        else:
            assert result.failed_items == expected_result['failed_items']
        assert result.total_failed_items == expected_result[
            'total_failed_items']


@pytest.mark.parametrize('agent_list, expected_result, agent_info_list', [
    (['001'], test_result[3], ['001']),
])
# WazuhDBConnection is patched so that construction succeeds (no real DB
# socket) but any `execute` call raises WazuhError(1000), forcing `clear`
# down its error-handling path.
@patch('wazuh.core.wdb.WazuhDBConnection.__init__', return_value=None)
@patch('wazuh.core.wdb.WazuhDBConnection.execute',
       side_effect=WazuhError(1000))
def test_syscheck_clear_exception(execute_mock, wdb_init_mock, agent_list,
                                  expected_result, agent_info_list):
    """Test function `clear` from syscheck module.

    It will force an exception.

    Parameters
    ----------
    agent_list : list
        List of agent IDs.
    expected_result : list
        List of dicts with expected results for every test.
    agent_info_list : list
        List of agent IDs that `syscheck.get_agents_info` will return when mocked.
    """
Esempio n. 30
0
def walk_dir(dirname,
             recursive,
             files,
             excluded_files,
             excluded_extensions,
             get_cluster_item_key,
             get_md5=True,
             whoami='master'):
    """Iterate recursively inside a directory, save the path of each found file and obtain its metadata.

    Parameters
    ----------
    dirname : str
        Directory within which to look for files.
    recursive : bool
        Whether to recursively look for files inside found directories.
    files : list
        List of files to obtain information from. The special value ["all"] selects every file.
    excluded_files : list
        List of files to ignore.
    excluded_extensions : list
        List of filename suffixes (extensions) to ignore.
    get_cluster_item_key : str
        Key inside cluster.json['files'] to which each file belongs. This is useful to know what actions to take
        after sending a file from one node to another, depending on the directory the file belongs to.
    get_md5 : bool
        Whether to calculate and save the MD5 hash of the found file.
    whoami : str
        TODO - To be deprecated.

    Returns
    -------
    walk_files : dict
        Paths (keys) and metadata (values) of the requested files found inside 'dirname'.

    Raises
    ------
    WazuhError
        Error 3015 if 'dirname' cannot be listed.
    """
    walk_files = {}

    # Get list of all files and directories inside 'dirname'.
    try:
        entries = listdir(common.ossec_path + dirname)
    except OSError as e:
        raise WazuhError(3015, str(e))

    for entry in entries:

        # If file is inside 'excluded_files' or file extension is inside 'excluded_extensions', skip over.
        # any() with a generator also handles an empty 'excluded_extensions'
        # list, which made the previous reduce(add, map(...)) raise TypeError.
        if entry in excluded_files or any(
                entry.endswith(ext) for ext in excluded_extensions):
            continue

        try:
            # Relative path to listed file.
            full_path = path.join(dirname, entry)

            # If 'all' files have been requested or entry is in the specified files list.
            if entry in files or files == ["all"]:

                if not path.isdir(common.ossec_path + full_path):
                    file_mod_time = datetime.utcfromtimestamp(
                        stat(common.ossec_path + full_path).st_mtime)
                    # TODO - To be deprecated: workers skip files older than 30 minutes.
                    if whoami == 'worker' and file_mod_time < (
                            datetime.utcnow() - timedelta(minutes=30)):
                        continue

                    # Create dict with metadata of 'full_path' file.
                    entry_metadata = {
                        "mod_time": str(file_mod_time),
                        'cluster_item_key': get_cluster_item_key
                    }
                    if '.merged' in entry:
                        entry_metadata['merged'] = True
                        entry_metadata['merge_type'] = 'agent-groups'
                        entry_metadata['merge_name'] = dirname + '/' + entry
                    else:
                        entry_metadata['merged'] = False

                    if get_md5:
                        entry_metadata['md5'] = md5(common.ossec_path +
                                                    full_path)

                    # Use the relative file path as a key to save its metadata dictionary.
                    walk_files[full_path] = entry_metadata

            # Recurse into subdirectories when requested.
            if recursive and path.isdir(common.ossec_path + full_path):
                walk_files.update(
                    walk_dir(full_path, recursive, files, excluded_files,
                             excluded_extensions, get_cluster_item_key,
                             get_md5, whoami))

        except Exception as e:
            # Best-effort: log the failure and continue with the remaining entries.
            logger.error("Could not get checksum of file {}: {}".format(
                entry, e))

    return walk_files