    def test_from_file_override(self, id_compression, tmpdir):
        # prepare
        id_compression.return_value = None

        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write("dummy_content\n")

        wfile_info = WalFileInfo.from_file(tmp_file.strpath, name="000000000000000000000002")
        assert wfile_info.name == "000000000000000000000002"
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == ("0000000000000000/000000000000000000000002")

        wfile_info = WalFileInfo.from_file(tmp_file.strpath, size=42)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == 42
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == ("0000000000000000/000000000000000000000001")

        wfile_info = WalFileInfo.from_file(tmp_file.strpath, time=43)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == 43
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == ("0000000000000000/000000000000000000000001")
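The overrides in this test work because from_file derives name, size, and time from the file on disk unless explicit keyword arguments replace them. A minimal sketch of that pattern, assuming a hypothetical simplified class rather than Barman's actual WalFileInfo:

import os


class FileInfoSketch(object):
    """Hypothetical stand-in illustrating the keyword-override behaviour."""

    def __init__(self, name, size, time):
        self.name = name
        self.size = size
        self.time = time

    @classmethod
    def from_file(cls, path, **kwargs):
        # Stat-derived defaults; explicit keyword arguments win
        stat = os.stat(path)
        defaults = {
            'name': os.path.basename(path),
            'size': stat.st_size,
            'time': stat.st_mtime,
        }
        defaults.update(kwargs)
        return cls(**defaults)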
Example #3
    def get_latest_archived_wal(self):
        """
        Return the WalFileInfo of the last WAL file in the archive,
        or None if the archive doesn't contain any WAL file.

        :rtype: WalFileInfo|None
        """
        # TODO: consider timeline?
        from os.path import isdir, join

        root = self.config.wals_directory

        # If the WAL archive directory doesn't exist, the archive is empty
        if not isdir(root):
            return None

        # Traverse all the directories in the archive in reverse order,
        # returning the first WAL file found
        for name in sorted(os.listdir(root), reverse=True):
            fullname = join(root, name)
            # All relevant files are in subdirectories, so
            # we skip any non-directory entry
            if isdir(fullname):
                hash_dir = fullname
                # Inspect contained files in reverse order
                for wal_name in sorted(os.listdir(hash_dir), reverse=True):
                    fullname = join(hash_dir, wal_name)
                    # Return the first file that has the correct name
                    if not isdir(fullname) and xlog.is_wal_file(fullname):
                        return WalFileInfo.from_file(fullname)

        # If we get here, no WAL files have been found
        return None
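The traversal above exploits the fact that WAL file names sort lexicographically in generation order. A minimal standalone sketch of the same two-level reverse scan (hypothetical helper, not part of Barman):

import os


def latest_file(root):
    # Walk hash directories and their contents in reverse lexicographic
    # order; because WAL names sort in generation order, the first plain
    # file found is the newest one in the archive.
    for name in sorted(os.listdir(root), reverse=True):
        hash_dir = os.path.join(root, name)
        if not os.path.isdir(hash_dir):
            continue
        for wal_name in sorted(os.listdir(hash_dir), reverse=True):
            path = os.path.join(hash_dir, wal_name)
            if os.path.isfile(path):
                return path
    return None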
Example #4
    def get_next_batch(self):
        """
        Returns the next batch of WAL files that have been archived through
        PostgreSQL's 'archive_command' (in the 'incoming' directory)

        :return: WalArchiverQueue: list of WAL files
        """
        # Get the batch size from configuration (0 = unlimited)
        batch_size = self.config.archiver_batch_size
        # List and sort all files in the incoming directory
        file_names = glob(
            os.path.join(self.config.incoming_wals_directory, '*'))
        file_names.sort()

        # Process anything that looks like a valid WAL file. Anything
        # else is treated like an error/anomaly
        files = []
        errors = []
        for file_name in file_names:
            if xlog.is_any_xlog_file(file_name) and os.path.isfile(file_name):
                files.append(file_name)
            else:
                errors.append(file_name)

        # Build the list of WalFileInfo
        wal_files = [WalFileInfo.from_file(f) for f in files]
        return WalArchiverQueue(wal_files,
                                batch_size=batch_size,
                                errors=errors)
Example #5
    def get_next_batch(self):
        """
        Returns the next batch of WAL files that have been archived through
        PostgreSQL's 'archive_command' (in the 'incoming' directory)

        :return: WalArchiverBatch: list of WAL files
        """
        # List and sort all files in the incoming directory
        file_names = glob(os.path.join(
            self.config.incoming_wals_directory, '*'))
        file_names.sort()

        # Process anything that looks like a valid WAL file. Anything
        # else is treated like an error/anomaly
        files = []
        errors = []
        for file_name in file_names:
            if xlog.is_any_xlog_file(file_name) and os.path.isfile(file_name):
                files.append(file_name)
            else:
                errors.append(file_name)

        # Build the list of WalFileInfo
        wal_files = [WalFileInfo.from_file(f) for f in files]
        return WalArchiverBatch(wal_files, errors=errors)
Example #6
    def get_next_batch(self):
        """
        Returns the next batch of WAL files that have been archived via
        streaming replication (in the 'streaming' directory)

        This method always leaves one file in the "streaming" directory,
        because the 'pg_receivexlog' process needs at least one file to
        detect the current streaming position after a restart.

        :return: WalArchiverQueue: list of WAL files
        """
        # Get the batch size from configuration (0 = unlimited)
        batch_size = self.config.streaming_archiver_batch_size
        # List and sort all files in the incoming directory
        file_names = glob(
            os.path.join(self.config.streaming_wals_directory, '*'))
        file_names.sort()

        # Process anything that looks like a valid WAL file,
        # including partial ones and history files.
        # Anything else is treated like an error/anomaly
        files = []
        skip = []
        errors = []
        for file_name in file_names:
            # Ignore temporary files
            if file_name.endswith('.tmp'):
                continue
            # If the file doesn't exist, it has been renamed/removed while
            # we were reading the directory. Ignore it.
            if not os.path.exists(file_name):
                continue
            if not os.path.isfile(file_name):
                errors.append(file_name)
            elif xlog.is_partial_file(file_name):
                skip.append(file_name)
            elif xlog.is_any_xlog_file(file_name):
                files.append(file_name)
            else:
                errors.append(file_name)
        # If there is more than one partial file, keep the last
        # and treat the rest as normal files
        if len(skip) > 1:
            partials = skip[:-1]
            _logger.info('Archiving partial files for server %s: %s' %
                         (self.config.name, ", ".join(
                             [os.path.basename(f) for f in partials])))
            files.extend(partials)
            skip = skip[-1:]

        # Keep the last full WAL file in case no partial file is present
        elif len(skip) == 0 and files:
            skip.append(files.pop())

        # Build the list of WalFileInfo
        wal_files = [WalFileInfo.from_file(f, compression=None) for f in files]
        return WalArchiverQueue(wal_files,
                                batch_size=batch_size,
                                errors=errors,
                                skip=skip)
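The skip handling above guarantees pg_receivexlog always finds a file to resume from: with the listing sorted, only the newest partial stays behind. A sketch of just that selection rule (hypothetical helper, not Barman API):

def split_partials(partials):
    # 'partials' is assumed sorted; everything but the newest partial is
    # promoted to a normal file for archiving, while the newest one stays
    # behind so pg_receivexlog can resume streaming into it.
    if len(partials) > 1:
        return partials[:-1], partials[-1:]  # (archive_now, keep_behind)
    return [], partials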
Example #7
    def get_next_batch(self):
        """
        Returns the next batch of WAL files that have been archived through
        PostgreSQL's 'archive_command' (in the 'incoming' directory)

        :return: WalArchiverQueue: list of WAL files
        """
        # Get the batch size from configuration (0 = unlimited)
        batch_size = self.config.archiver_batch_size
        # List and sort all files in the incoming directory
        # IMPORTANT: the list is sorted, and this allows us to know that the
        # WAL stream we have is monotonically increasing. That allows us to
        # verify that a backup has all the WALs required for the restore.
        file_names = glob(
            os.path.join(self.config.incoming_wals_directory, '*'))
        file_names.sort()

        # Process anything that looks like a valid WAL file. Anything
        # else is treated like an error/anomaly
        files = []
        errors = []
        for file_name in file_names:
            # Ignore temporary files
            if file_name.endswith('.tmp'):
                continue
            if xlog.is_any_xlog_file(file_name) and os.path.isfile(file_name):
                files.append(file_name)
            else:
                errors.append(file_name)

        # Build the list of WalFileInfo
        wal_files = [WalFileInfo.from_file(f) for f in files]
        return WalArchiverQueue(wal_files,
                                batch_size=batch_size,
                                errors=errors)
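Both archiver variants share the same scan shape: sort the incoming directory, accept plain files with WAL-like names, and flag everything else. A condensed sketch of that split (hypothetical names, not Barman API):

import os
from glob import glob


def scan_incoming(directory, is_valid_name):
    # Sort first: a sorted listing keeps the WAL sequence monotonically
    # increasing. Plain files with valid names are accepted; everything
    # else (odd names, directories) is reported as an anomaly.
    files, errors = [], []
    for name in sorted(glob(os.path.join(directory, '*'))):
        if name.endswith('.tmp'):
            continue  # temporary files are simply ignored
        if is_valid_name(name) and os.path.isfile(name):
            files.append(name)
        else:
            errors.append(name)
    return files, errors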
Example #8
    def test_from_file_no_compression(self, tmpdir):
        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write('dummy_content\n')
        stat = os.stat(tmp_file.strpath)
        wfile_info = WalFileInfo.from_file(tmp_file.strpath)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == stat.st_size
        assert wfile_info.time == stat.st_mtime
        assert wfile_info.filename == '%s.meta' % tmp_file.strpath
        assert wfile_info.relpath() == '0000000000000000/000000000000000000000001'
Example #10
    def test_from_file_no_compression(self, mock_compression_manager, tmpdir):
        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write("dummy_content\n")
        stat = os.stat(tmp_file.strpath)
        wfile_info = WalFileInfo.from_file(tmp_file.strpath,
                                           mock_compression_manager)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == stat.st_size
        assert wfile_info.time == stat.st_mtime
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.relpath() == (
            "0000000000000000/000000000000000000000001")
Example #11
    def test_from_file_override(self, compression_manager, tmpdir):
        # prepare
        compression_manager.identify_compression.return_value = None
        compression_manager.unidentified_compression = None

        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write("dummy_content\n")

        wfile_info = WalFileInfo.from_file(tmp_file.strpath,
                                           compression_manager,
                                           name="000000000000000000000002")
        assert wfile_info.name == "000000000000000000000002"
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == (
            "0000000000000000/000000000000000000000002")

        wfile_info = WalFileInfo.from_file(tmp_file.strpath,
                                           compression_manager,
                                           size=42)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == 42
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == (
            "0000000000000000/000000000000000000000001")

        wfile_info = WalFileInfo.from_file(tmp_file.strpath,
                                           compression_manager,
                                           time=43)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == 43
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression is None
        assert wfile_info.relpath() == (
            "0000000000000000/000000000000000000000001")
Example #12
    def test_from_file_compression(self, id_compression, tmpdir):
        # prepare
        id_compression.return_value = 'test_compression'

        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write('dummy_content\n')
        wfile_info = WalFileInfo.from_file(tmp_file.strpath)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == '%s.meta' % tmp_file.strpath
        assert wfile_info.compression == 'test_compression'
        assert wfile_info.relpath() == '0000000000000000/000000000000000000000001'
Example #13
    def test_from_file_default_compression(self, id_compression, tmpdir):
        # prepare
        id_compression.return_value = None

        tmp_file = tmpdir.join("00000001000000E500000064")
        tmp_file.write("dummy_content\n")
        wfile_info = WalFileInfo.from_file(tmp_file.strpath, default_compression="test_default_compression")
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression == "test_default_compression"
        assert wfile_info.relpath() == ("00000001000000E5/00000001000000E500000064")
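The assertion on wfile_info.compression relies on a simple fallback: when no compression is identified on the file, the default_compression value is used. A one-line sketch of that rule (hypothetical helper name):

def effective_compression(identified, default_compression=None):
    # If nothing could be identified from the file content, fall back
    # to the configured default.
    return identified if identified is not None else default_compression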
Example #14
    def get_next_batch(self):
        """
        Returns the next batch of WAL files that have been archived via
        streaming replication (in the 'streaming' directory)

        This method always leaves one file in the "streaming" directory,
        because the 'pg_receivexlog' process needs at least one file to
        detect the current streaming position after a restart.

        :return: WalArchiverQueue: list of WAL files
        """
        # Get the batch size from configuration (0 = unlimited)
        batch_size = self.config.streaming_archiver_batch_size
        # List and sort all files in the incoming directory
        file_names = glob(os.path.join(
            self.config.streaming_wals_directory, '*'))
        file_names.sort()

        # Process anything that looks like a valid WAL file,
        # including partial ones and history files.
        # Anything else is treated like an error/anomaly
        files = []
        skip = []
        errors = []
        for file_name in file_names:
            if not os.path.isfile(file_name):
                errors.append(file_name)
            elif xlog.is_partial_file(file_name):
                skip.append(file_name)
            elif xlog.is_any_xlog_file(file_name):
                files.append(file_name)
            else:
                errors.append(file_name)
        # If there is more than one partial file, keep the last
        # and treat the rest as errors
        if len(skip) > 1:
            partials = skip[:-1]
            errors.extend(partials)
            _logger.warning('Multiple partial files found for server %s: %s' %
                            (self.config.name,
                             ", ".join(os.path.basename(f) for f in partials)))
            skip = skip[-1:]

        # Keep the last full WAL file in case no partial file is present
        elif len(skip) == 0 and files:
            skip.append(files.pop())

        # Build the list of WalFileInfo
        wal_files = [WalFileInfo.from_file(f, compression=None) for f in files]
        return WalArchiverQueue(wal_files,
                                batch_size=batch_size,
                                errors=errors, skip=skip)
Example #16
    def test_from_file_compression(self, mock_compression_manager, tmpdir):
        # prepare
        mock_compression_manager.identify_compression.return_value = "test_compression"

        tmp_file = tmpdir.join("000000000000000000000001")
        tmp_file.write("dummy_content\n")
        wfile_info = WalFileInfo.from_file(tmp_file.strpath,
                                           mock_compression_manager)
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression == "test_compression"
        assert wfile_info.relpath() == (
            "0000000000000000/000000000000000000000001")
Example #17
    def test_from_file_unidentified_compression(self, id_compression, tmpdir):
        # prepare
        id_compression.return_value = None

        tmp_file = tmpdir.join("00000001000000E500000064")
        tmp_file.write("dummy_content\n")
        wfile_info = WalFileInfo.from_file(
            tmp_file.strpath,
            unidentified_compression="test_unidentified_compression")
        assert wfile_info.name == tmp_file.basename
        assert wfile_info.size == tmp_file.size()
        assert wfile_info.time == tmp_file.mtime()
        assert wfile_info.filename == "%s.meta" % tmp_file.strpath
        assert wfile_info.compression == "test_unidentified_compression"
        assert wfile_info.relpath() == (
            "00000001000000E5/00000001000000E500000064")
Example #18
    def get_latest_archived_wals_info(self):
        """
        Return a dictionary mapping each timeline to the WalFileInfo
        of its most recent WAL file in the archive,
        or None if the archive doesn't contain any WAL file.

        :rtype: dict[str, WalFileInfo]|None
        """
        from os.path import isdir, join

        root = self.config.wals_directory

        # If the WAL archive directory doesn't exist, the archive is empty
        if not isdir(root):
            return None

        # Traverse all the directories in the archive in reverse order,
        # collecting the newest WAL file found for each timeline
        timelines = {}
        for name in sorted(os.listdir(root), reverse=True):
            fullname = join(root, name)
            # All relevant files are in subdirectories, so
            # we skip any non-directory entry
            if isdir(fullname):
                # Extract the timeline. If it is not valid, skip this directory
                try:
                    timeline = name[0:8]
                    int(timeline, 16)
                except ValueError:
                    continue

                # If this timeline already has a file, skip this directory
                if timeline in timelines:
                    continue

                hash_dir = fullname
                # Inspect contained files in reverse order
                for wal_name in sorted(os.listdir(hash_dir), reverse=True):
                    fullname = join(hash_dir, wal_name)
                    # Record the first (newest) file that has the correct name
                    if not isdir(fullname) and xlog.is_wal_file(fullname):
                        timelines[timeline] = WalFileInfo.from_file(fullname)
                        break

        # Return the timeline map or None if it is empty
        return timelines or None
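The timeline key used above is just the first eight hex characters of the hash directory name; a directory whose prefix is not valid hexadecimal is skipped. The same check as a standalone sketch (hypothetical helper):

def timeline_of(hash_dir_name):
    # The first 8 characters of a hash directory name are the timeline
    # in hexadecimal; return None for anything that does not parse.
    timeline = hash_dir_name[0:8]
    try:
        int(timeline, 16)
    except ValueError:
        return None
    return timeline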
Example #19
    def get_next_batch(self):
        """
        Returns the next batch of WAL files that have been archived via
        streaming replication (in the 'streaming' directory)

        This method always leaves one file in the "streaming" directory,
        because the 'pg_receivexlog' process needs at least one file to
        detect the current streaming position after a restart.

        :return: WalArchiverBatch: list of WAL files
        """
        # List and sort all files in the incoming directory
        file_names = glob(os.path.join(
            self.config.streaming_wals_directory, '*'))
        file_names.sort()

        # Process anything that looks like a valid WAL file,
        # including partial ones.
        # Anything else is treated like an error/anomaly
        files = []
        skip = []
        errors = []
        for file_name in file_names:
            if xlog.is_wal_file(file_name) and os.path.isfile(file_name):
                files.append(file_name)
            elif xlog.is_partial_file(file_name) and os.path.isfile(file_name):
                skip.append(file_name)
            else:
                errors.append(file_name)
        # If there is more than one partial file, keep the last
        # and treat the rest as errors
        if len(skip) > 1:
            errors.extend(skip[:-1])
            skip = skip[-1:]

        # Keep the last full WAL file in case no partial file is present
        elif len(skip) == 0 and files:
            skip.append(files.pop())

        # Build the list of WalFileInfo
        wal_files = [WalFileInfo.from_file(f, compression=None) for f in files]
        return WalArchiverBatch(wal_files, errors=errors, skip=skip)
Example #21
def build_backup_manager(
    server=None, name=None, config=None, global_conf=None, main_conf=None
):
    """
    Instantiate a BackupManager object using mocked parameters

    The compression_manager member is mocked

    :param barman.server.Server|None server: Optional Server object
    :rtype: barman.backup.BackupManager
    """
    if server is None:
        server = build_mocked_server(name, config, global_conf, main_conf)
    with mock.patch("barman.backup.CompressionManager"):
        manager = BackupManager(server=server)
    manager.compression_manager.unidentified_compression = None
    manager.compression_manager.get_wal_file_info.side_effect = (
        lambda filename: WalFileInfo.from_file(filename, manager.compression_manager)
    )
    server.backup_manager = manager
    return manager
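The side_effect wiring above is the standard mock idiom for delegating a single method of an otherwise-mocked object to real code. The same pattern in isolation, with toy names:

from unittest import mock

manager = mock.Mock()
# Delegate one method of an otherwise-mocked object to real code
manager.get_wal_file_info.side_effect = lambda filename: ('info', filename)
assert manager.get_wal_file_info('0000000100000000') == ('info', '0000000100000000')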
Example #22
    def archive_wal(self, compressor, wal_info):
        """
        Archive a WAL segment and update the wal_info object

        :param compressor: the compressor for the file (if any)
        :param WalFileInfo wal_info: the WAL file being processed
        """

        src_file = wal_info.orig_filename
        src_dir = os.path.dirname(src_file)
        dst_file = wal_info.fullpath(self.server)
        tmp_file = dst_file + '.tmp'
        dst_dir = os.path.dirname(dst_file)

        error = None
        try:
            # Run the pre_archive_script if present.
            script = HookScriptRunner(self.backup_manager,
                                      'archive_script', 'pre')
            script.env_from_wal_info(wal_info, src_file)
            script.run()

            # Run the pre_archive_retry_script if present.
            retry_script = RetryHookScriptRunner(self.backup_manager,
                                                 'archive_retry_script',
                                                 'pre')
            retry_script.env_from_wal_info(wal_info, src_file)
            retry_script.run()

            # Check if destination already exists
            if os.path.exists(dst_file):
                src_uncompressed = src_file
                dst_uncompressed = dst_file
                dst_info = WalFileInfo.from_file(dst_file)
                try:
                    comp_manager = self.backup_manager.compression_manager
                    if dst_info.compression is not None:
                        dst_uncompressed = dst_file + '.uncompressed'
                        comp_manager.get_compressor(
                            compression=dst_info.compression).decompress(
                                dst_file, dst_uncompressed)
                    if wal_info.compression:
                        src_uncompressed = src_file + '.uncompressed'
                        comp_manager.get_compressor(
                            compression=wal_info.compression).decompress(
                                src_file, src_uncompressed)
                    # Directly compare files.
                    # When the files are identical
                    # raise a MatchingDuplicateWalFile exception,
                    # otherwise raise a DuplicateWalFile exception.
                    if filecmp.cmp(dst_uncompressed, src_uncompressed):
                        raise MatchingDuplicateWalFile(wal_info)
                    else:
                        raise DuplicateWalFile(wal_info)
                finally:
                    if src_uncompressed != src_file:
                        os.unlink(src_uncompressed)
                    if dst_uncompressed != dst_file:
                        os.unlink(dst_uncompressed)

            mkpath(dst_dir)
            # Compress the file only if not already compressed
            if compressor and not wal_info.compression:
                compressor.compress(src_file, tmp_file)
                shutil.copystat(src_file, tmp_file)
                os.rename(tmp_file, dst_file)
                os.unlink(src_file)
                # Update wal_info
                stat = os.stat(dst_file)
                wal_info.size = stat.st_size
                wal_info.compression = compressor.compression
            else:
                # Try to move the file with an atomic rename.
                # POSIX guarantees that rename() is atomic when source
                # and destination are on the same filesystem.
                try:
                    os.rename(src_file, dst_file)
                except OSError:
                    # Source and destination are probably on different
                    # filesystems
                    shutil.copy2(src_file, tmp_file)
                    os.rename(tmp_file, dst_file)
                    os.unlink(src_file)
            # At this point the original file has been removed
            wal_info.orig_filename = None

            # Execute fsync() on the archived WAL file
            file_fd = os.open(dst_file, os.O_RDONLY)
            os.fsync(file_fd)
            os.close(file_fd)
            # Execute fsync() on the archived WAL containing directory
            fsync_dir(dst_dir)
            # Execute fsync() also on the incoming directory
            fsync_dir(src_dir)
        except Exception as e:
            # In case of failure save the exception for the post scripts
            error = e
            raise

        # Ensure the execution of the post_archive_retry_script and
        # the post_archive_script
        finally:
            # Run the post_archive_retry_script if present.
            try:
                retry_script = RetryHookScriptRunner(self,
                                                     'archive_retry_script',
                                                     'post')
                retry_script.env_from_wal_info(wal_info, dst_file, error)
                retry_script.run()
            except AbortedRetryHookScript as e:
                # Ignore the ABORT_STOP as it is a post-hook operation
                _logger.warning("Ignoring stop request after receiving "
                                "abort (exit code %d) from post-archive "
                                "retry hook script: %s",
                                e.hook.exit_status, e.hook.script)

            # Run the post_archive_script if present.
            script = HookScriptRunner(self, 'archive_script', 'post', error)
            script.env_from_wal_info(wal_info, dst_file)
            script.run()
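The rename branch above relies on os.rename() being atomic within a filesystem and failing with OSError across filesystems. A condensed sketch of the same move-or-copy fallback (hypothetical helper name):

import os
import shutil


def move_atomic(src_file, dst_file):
    try:
        # Atomic when source and destination share a filesystem
        os.rename(src_file, dst_file)
    except OSError:
        # Cross-filesystem move: copy to a temporary name first so the
        # destination never appears half-written, then rename and clean up
        tmp_file = dst_file + '.tmp'
        shutil.copy2(src_file, tmp_file)
        os.rename(tmp_file, dst_file)
        os.unlink(src_file)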
Example #23
    def test_decode_history_file(self, tmpdir):
        compressor = mock.Mock()

        # Regular history file
        p = tmpdir.join('00000002.history')
        p.write('1\t2/83000168\tat restore point "myrp"\n')
        wal_info = WalFileInfo.from_file(p.strpath)
        result = xlog.HistoryFileData(
            tli=2,
            parent_tli=1,
            reason='at restore point "myrp"',
            switchpoint=0x283000168)
        assert xlog.decode_history_file(wal_info, compressor) == [result]
        assert len(compressor.mock_calls) == 0

        # Comments must be skipped
        p = tmpdir.join('00000003.history')
        p.write('# Comment\n1\t2/83000168\tat restore point "testcomment"\n')
        wal_info = WalFileInfo.from_file(p.strpath)
        result = xlog.HistoryFileData(
            tli=3,
            parent_tli=1,
            reason='at restore point "testcomment"',
            switchpoint=0x283000168)
        assert xlog.decode_history_file(wal_info, compressor) == [result]
        assert len(compressor.mock_calls) == 0

        # History file with comments and empty lines
        p = tmpdir.join('00000004.history')
        p.write('# Comment\n\n1\t2/83000168\ttesting "testemptyline"\n')
        wal_info = WalFileInfo.from_file(p.strpath)
        result = xlog.HistoryFileData(
            tli=4,
            parent_tli=1,
            reason='testing "testemptyline"',
            switchpoint=0x283000168)
        assert xlog.decode_history_file(wal_info, compressor) == [result]
        assert len(compressor.mock_calls) == 0

        # Test compression handling (fix for bug #66 on GitHub)
        config_mock = mock.Mock()
        config_mock.compression = "gzip"

        # check custom compression method creation
        comp_manager = CompressionManager(config_mock, None)
        u = tmpdir.join('00000005.uncompressed')
        p = tmpdir.join('00000005.history')
        u.write('1\t2/83000168\tat restore point "myrp"\n')
        result = xlog.HistoryFileData(
            tli=5,
            parent_tli=1,
            reason='at restore point "myrp"',
            switchpoint=0x283000168)
        comp_manager.get_compressor('gzip').compress(u.strpath,
                                                     p.strpath)
        wal_info = WalFileInfo.from_file(p.strpath)
        assert xlog.decode_history_file(wal_info, comp_manager) == [result]

        with pytest.raises(barman.exceptions.BadHistoryFileContents):
            # Empty file
            p.write('')
            assert xlog.decode_history_file(wal_info, compressor)
            assert len(compressor.mock_calls) == 0

        with pytest.raises(barman.exceptions.BadHistoryFileContents):
            # Missing field
            p.write('1\t2/83000168')
            assert xlog.decode_history_file(wal_info, compressor)
            assert len(compressor.mock_calls) == 0

        with pytest.raises(barman.exceptions.BadHistoryFileContents):
            # Unexpected extra field
            p.write('1\t2/83000168\tat restore point "myrp"\ttest')
            assert xlog.decode_history_file(wal_info, compressor)
            assert len(compressor.mock_calls) == 0
Example #25
    def rebuild_xlogdb(self):
        """
        Rebuild the whole xlog database guessing it from the archive content.
        """
        from os.path import isdir, join

        output.info("Rebuilding xlogdb for server %s", self.config.name)
        root = self.config.wals_directory
        default_compression = self.config.compression
        wal_count = label_count = history_count = 0
        # lock the xlogdb as we are about to replace it completely
        with self.server.xlogdb('w') as fxlogdb:
            xlogdb_new = fxlogdb.name + ".new"
            with open(xlogdb_new, 'w') as fxlogdb_new:
                for name in sorted(os.listdir(root)):
                    # ignore the xlogdb and its lockfile
                    if name.startswith(self.server.XLOG_DB):
                        continue
                    fullname = join(root, name)
                    if isdir(fullname):
                        # all relevant files are in subdirectories
                        hash_dir = fullname
                        for wal_name in sorted(os.listdir(hash_dir)):
                            fullname = join(hash_dir, wal_name)
                            if isdir(fullname):
                                _logger.warning(
                                    'unexpected directory '
                                    'rebuilding the wal database: %s',
                                    fullname)
                            else:
                                if xlog.is_wal_file(fullname):
                                    wal_count += 1
                                elif xlog.is_backup_file(fullname):
                                    label_count += 1
                                else:
                                    _logger.warning(
                                        'unexpected file '
                                        'rebuilding the wal database: %s',
                                        fullname)
                                    continue
                                wal_info = WalFileInfo.from_file(
                                    fullname,
                                    default_compression=default_compression)
                                fxlogdb_new.write(wal_info.to_xlogdb_line())
                    else:
                        # only history files are here
                        if xlog.is_history_file(fullname):
                            history_count += 1
                            wal_info = WalFileInfo.from_file(
                                fullname,
                                default_compression=default_compression)
                            fxlogdb_new.write(wal_info.to_xlogdb_line())
                        else:
                            _logger.warning(
                                'unexpected file '
                                'rebuilding the wal database: %s',
                                fullname)
                os.fsync(fxlogdb_new.fileno())
            shutil.move(xlogdb_new, fxlogdb.name)
            fsync_dir(os.path.dirname(fxlogdb.name))
        output.info('Done rebuilding xlogdb for server %s '
                    '(history: %s, backup_labels: %s, wal_file: %s)',
                    self.config.name, history_count, label_count, wal_count)
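The rebuild writes a sibling '.new' file and only then swaps it into place, so a crash can never leave a half-written xlogdb. A condensed sketch of that write-fsync-rename sequence, with a hypothetical helper standing in for Barman's fsync_dir:

import os
import shutil


def rewrite_atomically(path, lines):
    new_path = path + '.new'
    with open(new_path, 'w') as f:
        f.writelines(lines)
        f.flush()
        os.fsync(f.fileno())  # data is on disk before the swap
    shutil.move(new_path, path)
    # Persist the rename itself by syncing the containing directory
    dir_fd = os.open(os.path.dirname(path) or '.', os.O_RDONLY)
    try:
        os.fsync(dir_fd)
    finally:
        os.close(dir_fd)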
Example #26
    def archive_wal(self, verbose=True):
        """
        Executes WAL maintenance operations, such as archiving and compression

        If verbose is set to False, outputs something only if there is
        at least one file

        :param bool verbose: report even if no action is taken
        """
        found = False
        compressor = self.compression_manager.get_compressor()
        with self.server.xlogdb('a') as fxlogdb:
            if verbose:
                output.info("Processing xlog segments for %s",
                            self.config.name,
                            log=False)
            # Get the first available backup
            first_backup_id = self.get_first_backup(BackupInfo.STATUS_NOT_EMPTY)
            first_backup = self.server.get_backup(first_backup_id)
            for filename in sorted(glob(
                    os.path.join(self.config.incoming_wals_directory, '*'))):
                if not found and not verbose:
                    output.info("Processing xlog segments for %s",
                                self.config.name,
                                log=False)
                found = True

                # Create WAL Info object
                wal_info = WalFileInfo.from_file(filename, compression=None)

                # If there are no available backups ...
                if first_backup is None:
                    # ... delete xlog segments only for exclusive backups
                    if BackupOptions.CONCURRENT_BACKUP \
                            not in self.config.backup_options:
                        # Skipping history files
                        if not xlog.is_history_file(filename):
                            output.info("\tNo base backup available."
                                        " Trashing file %s"
                                        " from server %s",
                                        wal_info.name, self.config.name)
                            os.unlink(filename)
                            continue
                # ... otherwise
                else:
                    # ... delete xlog segments older than the first backup
                    if wal_info.name < first_backup.begin_wal:
                        # Skipping history files
                        if not xlog.is_history_file(filename):
                            output.info("\tOlder than first backup."
                                        " Trashing file %s"
                                        " from server %s",
                                        wal_info.name, self.config.name)
                            os.unlink(filename)
                            continue

                # Report to the user the WAL file we are archiving
                output.info("\t%s", os.path.basename(filename), log=False)
                _logger.info("Archiving %s/%s",
                             self.config.name,
                             os.path.basename(filename))
                # Archive the WAL file
                try:
                    self.cron_wal_archival(compressor, wal_info)
                except AbortedRetryHookScript as e:
                    _logger.warning("Archiving of %s/%s aborted by "
                                    "pre_archive_retry_script."
                                    "Reason: %s" % (self.config.name,
                                                    os.path.basename(),
                                                    e))
                    return
                # Updates the information of the WAL archive with
                # the latest segments
                fxlogdb.write(wal_info.to_xlogdb_line())
                # flush and fsync for every line
                fxlogdb.flush()
                os.fsync(fxlogdb.fileno())
        if not found and verbose:
            output.info("\tno file found", log=False)
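Each xlogdb line is flushed and fsynced before the next WAL file is processed, trading throughput for durability. The same rule as a tiny sketch (hypothetical helper name):

import os


def append_durably(fileobj, line):
    # Flush Python's buffer, then fsync the descriptor, so the line is
    # safely on disk before the next WAL file is archived.
    fileobj.write(line)
    fileobj.flush()
    os.fsync(fileobj.fileno())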
Example #27
    def test_archive_wal(self, tmpdir, capsys):
        """
        Test WalArchiver.archive_wal behaviour when the WAL file already
        exists in the archive
        """

        # Setup the test environment
        backup_manager = build_backup_manager(
            name='TestServer',
            global_conf={
                'barman_home': tmpdir.strpath
            })
        backup_manager.compression_manager.get_compressor.return_value = None
        backup_manager.server.get_backup.return_value = None

        basedir = tmpdir.join('main')
        incoming_dir = basedir.join('incoming')
        archive_dir = basedir.join('wals')
        xlog_db = archive_dir.join('xlog.db')
        wal_name = '000000010000000000000001'
        wal_file = incoming_dir.join(wal_name)
        wal_file.ensure()
        archive_dir.ensure(dir=True)
        xlog_db.ensure()
        backup_manager.server.xlogdb.return_value.__enter__.return_value = (
            xlog_db.open(mode='a'))
        archiver = FileWalArchiver(backup_manager)
        backup_manager.server.archivers = [archiver]

        # Tests a basic archival process
        wal_info = WalFileInfo.from_file(wal_file.strpath)
        archiver.archive_wal(None, wal_info)

        assert not os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_info.fullpath(backup_manager.server))

        # Tests the archiver behaviour for duplicate WAL files, as the
        # wal file named '000000010000000000000001' was already archived
        # in the previous test
        wal_file.ensure()
        wal_info = WalFileInfo.from_file(wal_file.strpath)

        with pytest.raises(MatchingDuplicateWalFile):
            archiver.archive_wal(None, wal_info)

        # Tests the archiver behaviour for duplicated WAL files with
        # different contents
        wal_file.write('test')
        wal_info = WalFileInfo.from_file(wal_file.strpath)

        with pytest.raises(DuplicateWalFile):
            archiver.archive_wal(None, wal_info)

        # Tests the archiver behaviour for duplicate WAL files, as the
        # wal file named '000000010000000000000001' was already archived
        # in the previous test and the input file uses compression
        compressor = PyGZipCompressor(backup_manager.config, 'pygzip')
        compressor.compress(wal_file.strpath, wal_file.strpath)
        wal_info = WalFileInfo.from_file(wal_file.strpath)
        assert os.path.exists(wal_file.strpath)
        backup_manager.compression_manager.get_compressor.return_value = (
            compressor)

        with pytest.raises(MatchingDuplicateWalFile):
            archiver.archive_wal(None, wal_info)

        # Test the archiver behaviour when the incoming file is compressed
        # and it has been already archived and compressed.
        compressor.compress(wal_info.fullpath(backup_manager.server),
                            wal_info.fullpath(backup_manager.server))

        wal_info = WalFileInfo.from_file(wal_file.strpath)
        with pytest.raises(MatchingDuplicateWalFile):
            archiver.archive_wal(None, wal_info)

        # Reset the status of the incoming and WALs directory
        # removing the files archived during the preceding tests.
        os.unlink(wal_info.fullpath(backup_manager.server))
        os.unlink(wal_file.strpath)

        # Test the archival of a WAL file using compression.
        wal_file.write('test')
        wal_info = WalFileInfo.from_file(wal_file.strpath)
        archiver.archive_wal(compressor, wal_info)
        assert os.path.exists(wal_info.fullpath(backup_manager.server))
        assert not os.path.exists(wal_file.strpath)
        assert 'gzip' == identify_compression(
            wal_info.fullpath(backup_manager.server)
        )
Example #30
    def rebuild_xlogdb(self):
        """
        Rebuild the whole xlog database guessing it from the archive content.
        """
        from os.path import isdir, join

        output.info("Rebuilding xlogdb for server %s", self.config.name)
        root = self.config.wals_directory
        default_compression = self.config.compression
        wal_count = label_count = history_count = 0
        # lock the xlogdb as we are about to replace it completely
        with self.server.xlogdb('w') as fxlogdb:
            xlogdb_new = fxlogdb.name + ".new"
            with open(xlogdb_new, 'w') as fxlogdb_new:
                for name in sorted(os.listdir(root)):
                    # ignore the xlogdb and its lockfile
                    if name.startswith(self.server.XLOG_DB):
                        continue
                    fullname = join(root, name)
                    if isdir(fullname):
                        # all relevant files are in subdirectories
                        hash_dir = fullname
                        for wal_name in sorted(os.listdir(hash_dir)):
                            fullname = join(hash_dir, wal_name)
                            if isdir(fullname):
                                _logger.warning(
                                    'unexpected directory '
                                    'rebuilding the wal database: %s',
                                    fullname)
                            else:
                                if xlog.is_wal_file(fullname):
                                    wal_count += 1
                                elif xlog.is_backup_file(fullname):
                                    label_count += 1
                                elif fullname.endswith('.tmp'):
                                    _logger.warning(
                                        'temporary file found '
                                        'rebuilding the wal database: %s',
                                        fullname)
                                    continue
                                else:
                                    _logger.warning(
                                        'unexpected file '
                                        'rebuilding the wal database: %s',
                                        fullname)
                                    continue
                                wal_info = WalFileInfo.from_file(
                                    fullname,
                                    default_compression=default_compression)
                                fxlogdb_new.write(wal_info.to_xlogdb_line())
                    else:
                        # only history files are here
                        if xlog.is_history_file(fullname):
                            history_count += 1
                            wal_info = WalFileInfo.from_file(
                                fullname,
                                default_compression=default_compression)
                            fxlogdb_new.write(wal_info.to_xlogdb_line())
                        else:
                            _logger.warning(
                                'unexpected file '
                                'rebuilding the wal database: %s', fullname)
                os.fsync(fxlogdb_new.fileno())
            shutil.move(xlogdb_new, fxlogdb.name)
            fsync_dir(os.path.dirname(fxlogdb.name))
        output.info(
            'Done rebuilding xlogdb for server %s '
            '(history: %s, backup_labels: %s, wal_file: %s)', self.config.name,
            history_count, label_count, wal_count)