Example 1
def upload_xml(xml_file, path):
    """
    Upload XML files (rules, decoders and ossec.conf)
    :param xml_file: content of the XML file
    :param path: Destination of the new XML file
    :return: Confirmation message
    """
    # Path of temporary files for parsing xml input
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.xml'.format(
        common.ossec_path, time.time(), random.randint(0, 1000))
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            final_xml = prettify_xml(xml_file)
            tmp_file.write(final_xml)
        chmod(tmp_file_path, 0o660)
    except IOError:
        raise WazuhInternalError(1005)

    # Move temporary file to group folder
    try:
        new_conf_path = join(common.ossec_path, path)
        safe_move(tmp_file_path, new_conf_path, permissions=0o660)
    except Error:
        raise WazuhInternalError(1016)

    return WazuhResult({'message': 'File was successfully updated'})
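
Note: every example on this page calls a safe_move helper that is not reproduced here. As a rough sketch of what a helper with that signature might do (an assumption for illustration, not the actual Wazuh implementation), the idea is to copy the source to a temporary name next to the destination, apply the requested metadata, and replace the destination atomically:

import os
import shutil


def safe_move(source, target, ownership=None, time=None, permissions=None):
    """Hypothetical sketch: move 'source' to 'target' without ever exposing a
    partially written destination file."""
    tmp_target = f"{target}.tmp"
    # Copy first so the source stays intact if anything below fails.
    shutil.copyfile(source, tmp_target)

    # Apply the requested metadata to the temporary copy.
    if ownership is not None:
        os.chown(tmp_target, *ownership)   # (uid, gid)
    if permissions is not None:
        os.chmod(tmp_target, permissions)  # e.g. 0o660
    if time is not None:
        os.utime(tmp_target, time)         # (atime, mtime)

    # Atomically replace the destination, then remove the source.
    os.replace(tmp_target, target)
    os.remove(source)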
Example 2
        def overwrite_or_create_files(filename: str, data: Dict):
            """
            Updates a file coming from the master
            :param filename: Filename to update
            :param data: File metadata such as modification time, whether it's a merged file or not, etc.
            :return: None
            """
            full_filename_path = common.ossec_path + filename
            if os.path.basename(filename) == 'client.keys':
                self._check_removed_agents("{}{}".format(zip_path, filename), logger)

            if data['merged']:  # worker nodes can only receive agent-groups files
                if data['merge-type'] == 'agent-info':
                    logger.warning("Agent status received in a worker node")
                    raise WazuhInternalError(3011)

                for name, content, _ in wazuh.core.cluster.cluster.unmerge_agent_info('agent-groups', zip_path, filename):
                    full_unmerged_name = os.path.join(common.ossec_path, name)
                    tmp_unmerged_path = full_unmerged_name + '.tmp'
                    with open(tmp_unmerged_path, 'wb') as f:
                        f.write(content)
                    safe_move(tmp_unmerged_path, full_unmerged_name,
                              permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                              ownership=(common.ossec_uid(), common.ossec_gid())
                              )
            else:
                if not os.path.exists(os.path.dirname(full_filename_path)):
                    utils.mkdir_with_mode(os.path.dirname(full_filename_path))
                safe_move("{}{}".format(zip_path, filename), full_filename_path,
                          permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                          ownership=(common.ossec_uid(), common.ossec_gid())
                          )
Example 3
def upload_list(list_file, path):
    """
    Updates CDB lists
    :param list_file: content of the list
    :param path: Destination of the new list file
    :return: Confirmation message.
    """
    # path of temporary file
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.txt'.format(common.ossec_path, time.time(), random.randint(0, 1000))

    try:
        # create temporary file
        with open(tmp_file_path, 'w') as tmp_file:
            # write the content of the list to tmp_file_path
            for element in list_file.splitlines():
                # skip empty lines
                if not element:
                    continue
                tmp_file.write(element.strip() + '\n')
        chmod(tmp_file_path, 0o640)
    except IOError:
        raise WazuhInternalError(1005)

    # validate CDB list
    if not validate_cdb_list(tmp_file_path):
        raise WazuhError(1800)

    # move temporary file to group folder
    try:
        new_conf_path = join(common.ossec_path, path)
        safe_move(tmp_file_path, new_conf_path, permissions=0o660)
    except Error:
        raise WazuhInternalError(1016)

    return WazuhResult({'message': 'File updated successfully'})
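
validate_cdb_list is not shown above. A CDB list is a plain-text file whose non-empty lines hold key:value pairs, so a minimal validation sketch could look like the following (the regex and the exact rules are an assumption, not the real Wazuh validator):

import re

# Illustrative pattern: a key without ':' or whitespace, a ':' separator,
# and an optional value. The real validator may be stricter.
_CDB_LINE = re.compile(r"^[^:\s]+:.*$")


def validate_cdb_list(path):
    """Hypothetical check: every non-empty line must match key:value."""
    with open(path) as f:
        return all(_CDB_LINE.match(line) for line in f if line.strip())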
Example 4
def upload_xml(xml_file, path):
    """
    Upload XML files (rules and decoders)
    :param xml_file: content of the XML file
    :param path: Destination of the new XML file
    :return: Confirmation message
    """
    # -- characters are not allowed in XML comments
    xml_file = replace_in_comments(xml_file, '--', '%wildcard%')

    # path of temporary files for parsing xml input
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.xml'.format(common.ossec_path, time.time(), random.randint(0, 1000))

    # create temporary file for parsing xml input
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            # beautify the xml file
            xml = parseString('<root>' + xml_file + '</root>')
            # remove first line (XML specification: <?xml version="1.0" ?>), <root> and </root> tags, and empty lines
            indent = '  '  # indent parameter for toprettyxml function
            pretty_xml = '\n'.join(filter(lambda x: x.strip(), xml.toprettyxml(indent=indent).split('\n')[2:-2])) + '\n'
            # revert xml.dom replacements
            # (https://github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305)
            pretty_xml = pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace("&quot;", "\"", ) \
                .replace("&gt;", ">").replace('&apos;', "'")
            # delete two first spaces of each line
            final_xml = re.sub(fr'^{indent}', '', pretty_xml, flags=re.MULTILINE)
            final_xml = replace_in_comments(final_xml, '%wildcard%', '--')
            tmp_file.write(final_xml)
        chmod(tmp_file_path, 0o660)
    except IOError:
        raise WazuhInternalError(1005)
    except ExpatError:
        raise WazuhError(1113)

    try:
        # check xml format
        try:
            load_wazuh_xml(tmp_file_path)
        except Exception as e:
            raise WazuhError(1113, str(e))

        # move temporary file to group folder
        try:
            new_conf_path = join(common.ossec_path, path)
            safe_move(tmp_file_path, new_conf_path, permissions=0o660)
        except Error:
            raise WazuhInternalError(1016)

        return WazuhResult({'message': 'File updated successfully'})

    except Exception as e:
        # remove created temporary file if an exception happens
        remove(tmp_file_path)
        raise e
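
The replace_in_comments helper used in this example is not shown either. Because '--' is illegal inside XML comments, it presumably swaps that token for a placeholder before parsing and restores it afterwards. A rough, regex-based sketch of that idea (an assumption, not the actual helper):

import re


def replace_in_comments(original_content, to_be_replaced, replacement):
    """Hypothetical sketch: substitute a token only inside <!-- ... --> blocks,
    leaving the comment delimiters themselves untouched."""
    def _sub(match):
        return '<!--' + match.group(1).replace(to_be_replaced, replacement) + '-->'

    return re.sub(r'<!--(.*?)-->', _sub, original_content, flags=re.DOTALL)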
Example 5
    def delete_single_group(group_id):
        """Delete a group

        :param group_id: Group ID.
        :return: Confirmation message.
        """
        # Delete group directory (move it to a backup)
        group_path = path.join(common.shared_path, group_id)
        group_backup = path.join(common.backup_path, 'groups',
                                 "{0}_{1}".format(group_id, int(time())))
        if path.exists(group_path):
            safe_move(group_path, group_backup, permissions=0o660)

        msg = "Group '{0}' deleted.".format(group_id)

        return {'message': msg}
Example 6
        def overwrite_or_create_files(filename: str, data: Dict):
            """Update a file coming from the master.

            Move a file which is inside the unzipped directory that comes from master to the path
            specified in 'filename'. If the file is 'merged' type, it is first split into files
            and then moved to their final directory.

            Parameters
            ----------
            filename : str
                Filename inside unzipped dir to update.
            data : dict
                File metadata such as modification time, whether it's a merged file or not, etc.
            """
            full_filename_path = os.path.join(common.wazuh_path, filename)
            if os.path.basename(filename) == 'client.keys':
                self._check_removed_agents(os.path.join(zip_path, filename),
                                           logger)

            if data['merged']:  # worker nodes can only receive agent-groups files
                # Split merged file into individual files inside zipdir (directory containing unzipped files),
                # and then move each one to the destination directory (<ossec_path>/filename).
                for name, content, _ in wazuh.core.cluster.cluster.unmerge_info(
                        'agent-groups', zip_path, filename):
                    full_unmerged_name = os.path.join(common.wazuh_path, name)
                    tmp_unmerged_path = full_unmerged_name + '.tmp'
                    with open(tmp_unmerged_path, 'wb') as f:
                        f.write(content)
                    safe_move(tmp_unmerged_path,
                              full_unmerged_name,
                              permissions=self.cluster_items['files'][
                                  data['cluster_item_key']]['permissions'],
                              ownership=(common.ossec_uid(),
                                         common.ossec_gid()))
            else:
                # Create destination dir if it doesn't exist.
                if not os.path.exists(os.path.dirname(full_filename_path)):
                    utils.mkdir_with_mode(os.path.dirname(full_filename_path))
                # Move the file from zipdir (directory containing unzipped files) to <ossec_path>/filename.
                safe_move(os.path.join(zip_path, filename),
                          full_filename_path,
                          permissions=self.cluster_items['files'][
                              data['cluster_item_key']]['permissions'],
                          ownership=(common.ossec_uid(), common.ossec_gid()))
Example 7
def upload_list_file(filename=None, content=None, overwrite=False):
    """Upload a new list file.

    Parameters
    ----------
    filename : str
        Destination path of the new file.
    content : str
        Content of file to be uploaded.
    overwrite : bool
        True for updating existing files, False otherwise.

    Returns
    -------
    result : AffectedItemsWazuhResult
        Confirmation message.
    """
    result = AffectedItemsWazuhResult(
        all_msg='CDB list file uploaded successfully',
        none_msg='Could not upload CDB list file')
    full_path = join(common.user_lists_path, filename)
    backup_file = ''

    try:
        # Raise WazuhError if CDB list is not valid
        validate_cdb_list(content)

        # If file already exists and overwrite is False, raise exception.
        if not overwrite and exists(full_path):
            raise WazuhError(1905)
        # If file with same name already exists in subdirectory.
        elif get_filenames_paths([filename])[0] != full_path:
            raise WazuhError(1805)
        # Create backup and delete original CDB list.
        elif overwrite and exists(full_path):
            backup_file = f"{full_path}.backup"
            delete_file_with_backup(backup_file, full_path, delete_list_file)

        upload_file(content,
                    to_relative_path(full_path),
                    check_xml_formula_values=False)
        result.affected_items.append(to_relative_path(full_path))
        result.total_affected_items = len(result.affected_items)
        # Remove back up file if no exceptions were raised.
        exists(backup_file) and remove(backup_file)
    except WazuhError as e:
        result.add_failed_item(id_=to_relative_path(full_path), error=e)
    finally:
        # If the backup file was not deleted (because an exception was raised), restore it.
        exists(backup_file) and safe_move(
            backup_file, full_path, permissions=0o660)

    return result
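
The backup/restore logic above relies on delete_file_with_backup, which is not included here. A plausible sketch of its behaviour (assumed, not the real helper): copy the current file to the backup path, then remove the original through the supplied delete function so any extra clean-up that function performs also runs.

from os.path import basename
from shutil import copyfile


def delete_file_with_backup(backup_file, abs_path, delete_function):
    # Illustrative sketch: keep a copy so the caller can restore it on failure.
    copyfile(abs_path, backup_file)
    # Delegate the removal itself (e.g. delete_list_file or delete_decoder_file);
    # the keyword argument name is an assumption.
    delete_function(filename=basename(abs_path))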
Example 8
def update_ossec_conf(new_conf=None):
    """
    Replace wazuh configuration (ossec.conf) with the provided configuration.

    Parameters
    ----------
    new_conf: str
        The new configuration to be applied.
    """
    result = AffectedItemsWazuhResult(
        all_msg=f"Configuration was successfully updated"
        f"{' in specified node' if node_id != 'manager' else ''}",
        some_msg='Could not update configuration in some nodes',
        none_msg=f"Could not update configuration"
        f"{' in specified node' if node_id != 'manager' else ''}")
    backup_file = f'{common.ossec_conf}.backup'
    try:
        # Check a configuration has been provided
        if not new_conf:
            raise WazuhError(1125)

        # Check if the configuration is valid
        validate_wazuh_xml(new_conf, config_file=True)

        # Create a backup of the current configuration before attempting to replace it
        try:
            copyfile(common.ossec_conf, backup_file)
        except IOError:
            raise WazuhError(1019)

        # Write the new configuration and validate it
        write_ossec_conf(new_conf)
        is_valid = validate_ossec_conf()

        if not isinstance(is_valid, dict) or ('status' in is_valid
                                              and is_valid['status'] != 'OK'):
            raise WazuhError(1125)
        else:
            result.affected_items.append(node_id)
        exists(backup_file) and remove(backup_file)
    except WazuhError as e:
        result.add_failed_item(id_=node_id, error=e)
    finally:
        exists(backup_file) and safe_move(backup_file, common.ossec_conf)

    result.total_affected_items = len(result.affected_items)
    return result
Example 9
def upload_decoder_file(filename: str,
                        content: str,
                        overwrite: bool = False) -> AffectedItemsWazuhResult:
    """Upload a new decoder file or update an existing one.

    Parameters
    ----------
    filename : str
        Name of the decoder file.
    content : str
        Content of the file. It must be a valid XML file.
    overwrite : bool
        True for updating existing files. False otherwise.

    Returns
    -------
    AffectedItemsWazuhResult
    """
    result = AffectedItemsWazuhResult(
        all_msg='Decoder was successfully uploaded',
        none_msg='Could not upload decoder')
    full_path = join(common.user_decoders_path, filename)
    backup_file = ''
    try:
        if len(content) == 0:
            raise WazuhError(1112)

        validate_wazuh_xml(content)
        # If file already exists and overwrite is False, raise exception
        if not overwrite and exists(full_path):
            raise WazuhError(1905)
        elif overwrite and exists(full_path):
            backup_file = f'{full_path}.backup'
            delete_file_with_backup(backup_file, full_path,
                                    delete_decoder_file)

        upload_file(content, to_relative_path(full_path))
        result.affected_items.append(to_relative_path(full_path))
        result.total_affected_items = len(result.affected_items)
        backup_file and exists(backup_file) and remove(backup_file)
    except WazuhError as e:
        result.add_failed_item(id_=to_relative_path(full_path), error=e)
    finally:
        exists(backup_file) and safe_move(
            backup_file, full_path, permissions=0o0660)

    return result
Example 10
def upload_group_configuration(group_id, file_content):
    """
    Updates group configuration
    :param group_id: Group to update
    :param file_content: File content of the new configuration in a string.
    :return: Confirmation message.
    """
    if not os_path.exists(os_path.join(common.shared_path, group_id)):
        raise WazuhResourceNotFound(1710, group_id)
    # path of temporary files for parsing xml input
    tmp_file_path = os_path.join(
        common.ossec_path, "tmp",
        f"api_tmp_file_{time.time()}_{random.randint(0, 1000)}.xml")
    # create temporary file for parsing xml input and validate XML format
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            custom_entities = {
                '_custom_open_tag_': '\\<',
                '_custom_close_tag_': '\\>',
                '_custom_amp_lt_': '&lt;',
                '_custom_amp_gt_': '&gt;'
            }

            # Replace every custom entity
            for character, replacement in custom_entities.items():
                file_content = re.sub(replacement.replace('\\', '\\\\'),
                                      character, file_content)

            # Beautify xml file using a minidom.Document
            xml = parseString(f'<root>\n{file_content}\n</root>')

            # Remove first line (XML specification: <?xml version="1.0" ?>), <root> and </root> tags, and empty lines
            pretty_xml = '\n'.join(
                filter(lambda x: x.strip(),
                       xml.toprettyxml(indent='  ').split('\n')[2:-2])) + '\n'

            # Revert xml.dom replacements and remove any whitespaces and '\n' between '\' and '<' if present
            # github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305
            pretty_xml = re.sub(
                r'(?:(?<=\\) +)', '',
                pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace(
                    "&quot;",
                    "\"",
                ).replace("&gt;", ">").replace("\\\n", "\\"))

            # Restore the replaced custom entities
            for replacement, character in custom_entities.items():
                pretty_xml = re.sub(replacement,
                                    character.replace('\\',
                                                      '\\\\'), pretty_xml)

            tmp_file.write(pretty_xml)
    except Exception as e:
        raise WazuhError(1113, str(e))

    try:
        # check Wazuh xml format
        try:
            subprocess.check_output([
                os_path.join(common.ossec_path, "bin", "verify-agent-conf"),
                '-f', tmp_file_path
            ],
                                    stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # extract error message from output.
            # Example of raw output
            # 2019/01/08 14:51:09 verify-agent-conf: ERROR: (1230):
            # Invalid element in the configuration: 'agent_conf'.\n2019/01/08 14:51:09 verify-agent-conf: ERROR: (1207):
            # Syscheck remote configuration in '/var/ossec/tmp/api_tmp_file_2019-01-08-01-1546959069.xml' is corrupted.
            # \n\n
            # Example of desired output:
            # Invalid element in the configuration: 'agent_conf'.
            # Syscheck remote configuration in '/var/ossec/tmp/api_tmp_file_2019-01-08-01-1546959069.xml' is corrupted.
            output_regex = re.findall(
                pattern=
                r"\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2} verify-agent-conf: ERROR: "
                r"\(\d+\): ([\w \/ \_ \- \. ' :]+)",
                string=e.output.decode())
            if output_regex:
                raise WazuhError(1114, ' '.join(output_regex))
            else:
                raise WazuhError(1115, e.output.decode())
        except Exception as e:
            raise WazuhInternalError(1743, str(e))

        # move temporary file to group folder
        try:
            new_conf_path = os_path.join(common.shared_path, group_id,
                                         "agent.conf")
            safe_move(tmp_file_path, new_conf_path, permissions=0o660)
        except Exception as e:
            raise WazuhInternalError(1016, extra_message=str(e))

        return 'Agent configuration was successfully updated'
    except Exception as e:
        # remove created temporary file
        if os.path.exists(tmp_file_path):
            remove(tmp_file_path)
        raise e
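
As a worked example of the error-extraction regex used above, applying it to the sample output quoted in the comments yields the two clean error messages:

import re

raw_output = (
    "2019/01/08 14:51:09 verify-agent-conf: ERROR: (1230): "
    "Invalid element in the configuration: 'agent_conf'.\n"
    "2019/01/08 14:51:09 verify-agent-conf: ERROR: (1207): "
    "Syscheck remote configuration in "
    "'/var/ossec/tmp/api_tmp_file_2019-01-08-01-1546959069.xml' is corrupted.\n\n"
)

errors = re.findall(
    r"\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2} verify-agent-conf: ERROR: "
    r"\(\d+\): ([\w \/ \_ \- \. ' :]+)",
    raw_output)

# errors[0] == "Invalid element in the configuration: 'agent_conf'."
# errors[1] == "Syscheck remote configuration in '/var/ossec/tmp/api_tmp_file_2019-01-08-01-1546959069.xml' is corrupted."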
Example 11
        async def update_file(name: str, data: Dict):
            """Update a local file with one received from a worker.

            The modification date is checked to decide whether to update it or not.

            Parameters
            ----------
            name : str
                Relative path of the file.
            data : dict
                Metadata of the file (MD5, merged, etc).
            """
            # Full path
            full_path, error_updating_file = os.path.join(
                common.wazuh_path, name), False

            try:
                # Only valid client.keys is the local one (master).
                if os.path.basename(name) == 'client.keys':
                    self.logger.warning(
                        "Client.keys received in a master node")
                    raise exception.WazuhClusterError(3007)

                # If the file is merged, create individual files from it.
                if data['merged']:
                    for file_path, file_data, file_time in wazuh.core.cluster.cluster.unmerge_info(
                            data['merge_type'], decompressed_files_path,
                            data['merge_name']):
                        # Destination path.
                        full_unmerged_name = os.path.join(
                            common.wazuh_path, file_path)
                        # Path where to create the file before moving it to the destination path (with safe_move).
                        tmp_unmerged_path = os.path.join(
                            common.wazuh_path, 'queue', 'cluster', self.name,
                            os.path.basename(file_path))

                        try:
                            agent_id = os.path.basename(file_path)
                            # If the agent does not exist on the master, do not copy its file from the worker.
                            if agent_id not in agent_ids:
                                n_errors['warnings'][data['cluster_item_key']] = 1 \
                                    if n_errors['warnings'].get(data['cluster_item_key']) is None \
                                    else n_errors['warnings'][data['cluster_item_key']] + 1

                                self.logger.debug2(
                                    f"Received group of a non-existent agent '{agent_id}'"
                                )
                                continue

                            # Format the file_data specified inside the merged file.
                            try:
                                mtime = datetime.strptime(
                                    file_time, '%Y-%m-%d %H:%M:%S.%f')
                            except ValueError:
                                mtime = datetime.strptime(
                                    file_time, '%Y-%m-%d %H:%M:%S')

                            # If the file already existed, check if it is older than the one to be copied from worker.
                            if os.path.isfile(full_unmerged_name):
                                local_mtime = datetime.utcfromtimestamp(
                                    int(os.stat(full_unmerged_name).st_mtime))
                                if local_mtime > mtime:
                                    logger.debug2(
                                        f"Receiving an old file ({file_path})")
                                    continue

                            # Create the file in a temporary path and safely move it to the destination path.
                            with open(tmp_unmerged_path, 'wb') as f:
                                f.write(file_data)

                            mtime_epoch = timegm(mtime.timetuple())
                            utils.safe_move(
                                tmp_unmerged_path,
                                full_unmerged_name,
                                ownership=(common.ossec_uid(),
                                           common.ossec_gid()),
                                permissions=self.cluster_items['files'][
                                    data['cluster_item_key']]['permissions'],
                                time=(mtime_epoch, mtime_epoch))
                            self.integrity_sync_status[
                                'total_extra_valid'] += 1
                        except Exception as e:
                            self.logger.error(
                                f"Error updating agent group/status ({tmp_unmerged_path}): {e}"
                            )

                            n_errors['errors'][data['cluster_item_key']] = 1 \
                                if n_errors['errors'].get(data['cluster_item_key']) is None \
                                else n_errors['errors'][data['cluster_item_key']] + 1
                        await asyncio.sleep(0.0001)

                # If the file is not merged, move it directly to the destination path.
                else:
                    zip_path = os.path.join(decompressed_files_path, name)
                    utils.safe_move(zip_path,
                                    full_path,
                                    ownership=(common.ossec_uid(),
                                               common.ossec_gid()),
                                    permissions=self.cluster_items['files']
                                    [data['cluster_item_key']]['permissions'])

            except exception.WazuhException as e:
                logger.debug2(f"Warning updating file '{name}': {e}")
                error_tag = 'warnings'
                error_updating_file = True
            except Exception as e:
                logger.debug2(f"Error updating file '{name}': {e}")
                error_tag = 'errors'
                error_updating_file = True

            if error_updating_file:
                n_errors[error_tag][data['cluster_item_key']] = 1 if not n_errors[error_tag].get(
                    data['cluster_item_key']) \
                    else n_errors[error_tag][data['cluster_item_key']] + 1
Example 12
        async def update_file(name: str, data: Dict):
            """
            Updates a file from the worker. It checks the modification date to decide whether to update it or not.
            If it's a merged file, it unmerges it.
            :param name: Filename to update
            :param data: File metadata
            :return: None
            """
            # Full path
            full_path, error_updating_file, n_merged_files = common.ossec_path + name, False, 0

            # Cluster items information: write mode and permissions
            lock_full_path = "{}/queue/cluster/lockdir/{}.lock".format(common.ossec_path, os.path.basename(full_path))
            lock_file = open(lock_full_path, 'a+')
            try:
                fcntl.lockf(lock_file, fcntl.LOCK_EX)
                if os.path.basename(name) == 'client.keys':
                    self.logger.warning("Client.keys received in a master node")
                    raise exception.WazuhClusterError(3007)
                if data['merged']:
                    self.sync_extra_valid_status['total_extra_valid'] = len(agent_ids)
                    for file_path, file_data, file_time in wazuh.core.cluster.cluster.unmerge_info(data['merge_type'],
                                                                                                   decompressed_files_path,
                                                                                                   data['merge_name']):
                        full_unmerged_name = os.path.join(common.ossec_path, file_path)
                        tmp_unmerged_path = os.path.join(common.ossec_path, 'queue/cluster', self.name, os.path.basename(file_path))
                        try:
                            agent_id = os.path.basename(file_path)
                            if agent_id not in agent_ids:
                                n_errors['warnings'][data['cluster_item_key']] = 1 \
                                    if n_errors['warnings'].get(data['cluster_item_key']) is None \
                                    else n_errors['warnings'][data['cluster_item_key']] + 1

                                self.logger.debug2("Received group of a non-existent agent '{}'".format(agent_id))
                                continue

                            try:
                                mtime = datetime.strptime(file_time, '%Y-%m-%d %H:%M:%S.%f')
                            except ValueError:
                                mtime = datetime.strptime(file_time, '%Y-%m-%d %H:%M:%S')

                            if os.path.isfile(full_unmerged_name):

                                local_mtime = datetime.utcfromtimestamp(int(os.stat(full_unmerged_name).st_mtime))
                                # skip the received file if it is older than the manager's local copy
                                if local_mtime > mtime:
                                    logger.debug2("Receiving an old file ({})".format(file_path))
                                    continue

                            with open(tmp_unmerged_path, 'wb') as f:
                                f.write(file_data)

                            mtime_epoch = timegm(mtime.timetuple())
                            utils.safe_move(tmp_unmerged_path, full_unmerged_name,
                                            ownership=(common.ossec_uid(), common.ossec_gid()),
                                            permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                                            time=(mtime_epoch, mtime_epoch)
                                            )
                        except Exception as e:
                            self.logger.error("Error updating agent group/status ({}): {}".format(tmp_unmerged_path, e))
                            self.sync_extra_valid_status['total_extra_valid'] -= 1

                            n_errors['errors'][data['cluster_item_key']] = 1 \
                                if n_errors['errors'].get(data['cluster_item_key']) is None \
                                else n_errors['errors'][data['cluster_item_key']] + 1
                        await asyncio.sleep(0.0001)

                else:
                    zip_path = "{}{}".format(decompressed_files_path, name)
                    utils.safe_move(zip_path, full_path,
                                    ownership=(common.ossec_uid(), common.ossec_gid()),
                                    permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions']
                                    )

            except exception.WazuhException as e:
                logger.debug2("Warning updating file '{}': {}".format(name, e))
                error_tag = 'warnings'
                error_updating_file = True
            except Exception as e:
                logger.debug2("Error updating file '{}': {}".format(name, e))
                error_tag = 'errors'
                error_updating_file = True

            if error_updating_file:
                n_errors[error_tag][data['cluster_item_key']] = 1 if not n_errors[error_tag].get(
                    data['cluster_item_key']) \
                    else n_errors[error_tag][data['cluster_item_key']] + 1

            fcntl.lockf(lock_file, fcntl.LOCK_UN)
            lock_file.close()
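
In this example the per-file lock (open a lock file, fcntl.lockf with LOCK_EX, then LOCK_UN and close) is handled by hand. The same pattern can be wrapped in a small context manager so the lock is always released, even if an unexpected exception propagates; a sketch of that idea (the helper name is illustrative):

import fcntl
from contextlib import contextmanager


@contextmanager
def locked(lock_path):
    # Illustrative helper: hold an exclusive advisory lock for the 'with' body.
    with open(lock_path, 'a+') as lock_file:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)
        try:
            yield
        finally:
            fcntl.lockf(lock_file, fcntl.LOCK_UN)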
Example 13
    def process_files_from_worker(files_metadata: Dict,
                                  decompressed_files_path: str,
                                  cluster_items: dict, worker_name: str,
                                  timeout: int):
        """Iterate over the files received from the worker and update the local ones.

        Parameters
        ----------
        files_metadata : dict
            Dictionary containing file metadata (each key is a filepath and each value its metadata).
        decompressed_files_path : str
            Filepath of the decompressed received zipfile.
        cluster_items : dict
            Object containing cluster internal variables from the cluster.json file.
        worker_name : str
            Name of the worker instance. Used to access the correct worker folder.
        timeout : int
            Seconds to wait before stopping the task.

        Returns
        -------
        result : dict
            Dict containing number of updated chunks and any error found in the process.
        """
        result = {
            'total_updated': 0,
            'errors_per_folder': defaultdict(list),
            'generic_errors': []
        }

        try:
            with utils.Timeout(timeout):
                for file_path, data in files_metadata.items():
                    full_path = os.path.join(common.wazuh_path, file_path)
                    item_key = data['cluster_item_key']

                    # Only valid client.keys is the local one (master).
                    if os.path.basename(file_path) == 'client.keys':
                        raise exception.WazuhClusterError(3007)

                    # If the file is merged, create individual files from it.
                    if data['merged']:
                        for unmerged_file_path, file_data, file_time in wazuh.core.cluster.cluster.unmerge_info(
                                data['merge_type'], decompressed_files_path,
                                data['merge_name']):
                            try:
                                # Destination path.
                                full_unmerged_name = os.path.join(
                                    common.wazuh_path, unmerged_file_path)
                                # Path where to create the file before moving it to the destination path.
                                tmp_unmerged_path = os.path.join(
                                    common.wazuh_path, 'queue', 'cluster',
                                    worker_name,
                                    os.path.basename(unmerged_file_path))

                                # Format the file_data specified inside the merged file.
                                try:
                                    mtime = datetime.strptime(
                                        file_time, '%Y-%m-%d %H:%M:%S.%f')
                                except ValueError:
                                    mtime = datetime.strptime(
                                        file_time, '%Y-%m-%d %H:%M:%S')

                                # If the file already existed, check if it is older than the one from worker.
                                if os.path.isfile(full_unmerged_name):
                                    local_mtime = datetime.utcfromtimestamp(
                                        int(
                                            os.stat(
                                                full_unmerged_name).st_mtime))
                                    if local_mtime > mtime:
                                        continue

                                # Create the file in a temporary path and safely move it to the destination path.
                                with open(tmp_unmerged_path, 'wb') as f:
                                    f.write(file_data)

                                mtime_epoch = timegm(mtime.timetuple())
                                utils.safe_move(
                                    tmp_unmerged_path,
                                    full_unmerged_name,
                                    ownership=(common.wazuh_uid(),
                                               common.wazuh_gid()),
                                    permissions=cluster_items['files']
                                    [item_key]['permissions'],
                                    time=(mtime_epoch, mtime_epoch))
                                result['total_updated'] += 1
                            except TimeoutError as e:
                                raise e
                            except Exception as e:
                                result['errors_per_folder'][item_key].append(
                                    str(e))

                    # If the file is not 'merged' type, move it directly to the destination path.
                    else:
                        try:
                            zip_path = os.path.join(decompressed_files_path,
                                                    file_path)
                            utils.safe_move(zip_path,
                                            full_path,
                                            ownership=(common.wazuh_uid(),
                                                       common.wazuh_gid()),
                                            permissions=cluster_items['files']
                                            [item_key]['permissions'])
                        except TimeoutError as e:
                            raise e
                        except Exception as e:
                            result['errors_per_folder'][item_key].append(
                                str(e))
        except TimeoutError:
            result['generic_errors'].append(
                "Timeout processing extra-valid files.")
        except Exception as e:
            result['generic_errors'].append(
                f"Error updating worker files (extra valid): '{str(e)}'.")

        return result
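
utils.Timeout is used here only as a context manager that raises TimeoutError once the given number of seconds has elapsed. A minimal signal-based sketch of such a context manager (an assumption about its implementation; SIGALRM only works in the main thread on POSIX systems):

import signal


class Timeout:
    """Hypothetical sketch: raise TimeoutError if the 'with' body runs too long."""

    def __init__(self, seconds):
        self.seconds = seconds

    def _handler(self, signum, frame):
        raise TimeoutError()

    def __enter__(self):
        signal.signal(signal.SIGALRM, self._handler)
        signal.alarm(self.seconds)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        signal.alarm(0)  # Cancel any pending alarm on exit.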