def get_required_xlog_files(self, backup, target_tli=None, target_time=None,
                            target_xid=None):
    """
    Get the xlog files required for a recovery

    :param backup: the backup whose WAL files are needed; its
        ``begin_wal``/``end_wal`` delimit the initial range
    :param target_tli: target timeline; defaults to the timeline of
        the backup's last WAL segment
    :param target_time: optional recovery target time; the scan stops
        after yielding the first segment newer than this time
    :param target_xid: target transaction id (not used in this body)
    """
    begin = backup.begin_wal
    end = backup.end_wal
    # If timeline isn't specified, assume it is the same timeline
    # of the backup
    if not target_tli:
        target_tli, _, _ = xlog.decode_segment_name(end)
    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            # Handle .history files: add all of them to the output,
            # regardless of their age
            if xlog.is_history_file(wal_info.name):
                yield wal_info
                continue
            # Skip segments archived before the backup started
            if wal_info.name < begin:
                continue
            # Skip segments belonging to a timeline after the target one
            tli, _, _ = xlog.decode_segment_name(wal_info.name)
            if tli > target_tli:
                continue
            yield wal_info
            # Track the highest segment name returned so far
            if wal_info.name > end:
                end = wal_info.name
            # Past the recovery target time: no further segments needed
            if target_time and target_time < wal_info.time:
                break
        # return all the remaining history files
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            if xlog.is_history_file(wal_info.name):
                yield wal_info
def get_required_xlog_files(self, backup, target_tli=None, target_time=None,
                            target_xid=None):
    """
    Get the xlog file names required for a recovery of the given backup.

    :param backup: the backup whose WAL file names are needed
    :param target_tli: target timeline; defaults to the timeline of
        the backup's last WAL segment
    :param target_time: optional recovery target time; the scan stops
        after yielding the first segment newer than this time
    :param target_xid: target transaction id (not used in this body)
    """
    begin = backup.begin_wal
    end = backup.end_wal
    # If timeline isn't specified, assume it is the same timeline of the backup
    if not target_tli:
        target_tli, _, _ = xlog.decode_segment_name(end)
    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            name, _, stamp, _ = self.xlogdb_parse_line(line)
            # Skip segments archived before the backup started
            if name < begin:
                continue
            # Skip segments belonging to a timeline after the target one
            tli, _, _ = xlog.decode_segment_name(name)
            if tli > target_tli:
                continue
            yield name
            # Track the highest segment name returned so far
            if name > end:
                end = name
            # Past the recovery target time: stop the main scan
            if target_time and target_time < stamp:
                break
        # return all the remaining history files
        # NOTE(review): this loop only consumes lines left after a
        # ``break`` above; history files positioned earlier in the
        # xlogdb are not special-cased by the main loop — confirm
        # this is the intended behaviour.
        for line in fxlogdb:
            name, _, stamp, _ = self.xlogdb_parse_line(line)
            if xlog.is_history_file(name):
                yield name
def get_required_xlog_files(self, backup, target_tli=None, target_time=None,
                            target_xid=None):
    """
    Get the xlog file names required for a recovery of the given backup.

    :param backup: the backup whose WAL file names are needed
    :param target_tli: target timeline; defaults to the timeline of
        the backup's last WAL segment
    :param target_time: optional recovery target time; the scan stops
        after yielding the first segment newer than this time
    :param target_xid: target transaction id (not used in this body)
    """
    begin = backup.begin_wal
    end = backup.end_wal
    # If timeline isn't specified, assume it is the same timeline of the backup
    if not target_tli:
        target_tli, _, _ = xlog.decode_segment_name(end)
    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            name, _, stamp, _ = self.xlogdb_parse_line(line)
            # Skip segments archived before the backup started
            if name < begin:
                continue
            # Skip segments belonging to a timeline after the target one
            tli, _, _ = xlog.decode_segment_name(name)
            if tli > target_tli:
                continue
            yield name
            # Track the highest segment name returned so far
            if name > end:
                end = name
            # Past the recovery target time: stop the main scan
            if target_time and target_time < stamp:
                break
        # return all the remaining history files
        # NOTE(review): this loop only consumes lines left after a
        # ``break`` above; history files positioned earlier in the
        # xlogdb are not special-cased by the main loop — confirm
        # this is the intended behaviour.
        for line in fxlogdb:
            name, _, stamp, _ = self.xlogdb_parse_line(line)
            if xlog.is_history_file(name):
                yield name
def remove_wal_before_backup(self, backup_info):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files

    :param BackupInfo|None backup_info: the backup information structure
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite the xlogdb into a sibling ".new" file, keeping only
        # the surviving entries, then atomically swap it in place
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Keeps the WAL segment if it is a history file or later
                # than the given backup (the first available)
                # NOTE(review): if backup_info.begin_wal is None the
                # ">=" comparison raises TypeError on Python 3 —
                # confirm callers guarantee begin_wal is set
                if (xlog.is_history_file(wal_info.name) or
                        (backup_info and
                         wal_info.name >= backup_info.begin_wal)):
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                    continue
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Make sure the new content reaches disk before the rename
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        # Persist the rename itself (directory entry) on disk
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
def is_wal_relevant(self, wal_info, first_backup):
    """
    Decide whether a WAL file is still useful given the oldest backup.

    History files are always relevant.  A regular segment is rejected
    when its timeline predates the oldest backup's timeline, or when
    its name sorts before that backup's starting segment.

    :param WalFileInfo wal_info: the WAL file we are checking
    :param BackupInfo first_backup: the backup used for the checks
        (usually the oldest available on the server)
    """
    segment = wal_info.name
    # History files are kept unconditionally
    if xlog.is_history_file(segment):
        return True
    # A timeline older than the oldest backup's makes the WAL unusable
    segment_tli = xlog.decode_segment_name(segment)[0]
    if segment_tli < first_backup.timeline:
        output.info("\tThe timeline of the WAL file %s (%s), is lower "
                    "than the one of the oldest backup of "
                    "server %s (%s). Moving the WAL in "
                    "the error directory",
                    segment, segment_tli,
                    self.config.name, first_backup.timeline)
        return False
    # Segments archived before the first backup began are useless too
    if segment < first_backup.begin_wal:
        output.info("\tOlder than first backup of server %s. "
                    "Moving the WAL file %s in the error directory",
                    self.config.name, segment)
        return False
    return True
def get_wal_until_next_backup(self, backup, include_history=False):
    """
    Get the xlog files between backup and the next

    :param BackupInfo backup: a backup object, the starting point
        to retrieve WALs
    :param bool include_history: option for the inclusion of
        history files into the output
    """
    begin = backup.begin_wal
    # Look up the next backup only once (the original performed the
    # same get_next_backup() query twice)
    next_backup = self.get_next_backup(backup.backup_id)
    next_end = next_backup.end_wal if next_backup else None
    backup_tli, _, _ = xlog.decode_segment_name(begin)

    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            # Handle .history files: add all of them to the output,
            # regardless of their age, if requested (the
            # 'include_history' parameter is True)
            if xlog.is_history_file(wal_info.name):
                if include_history:
                    yield wal_info
                continue
            # Skip segments archived before the backup started
            if wal_info.name < begin:
                continue
            # Skip segments on a timeline later than the backup's one
            tli, _, _ = xlog.decode_segment_name(wal_info.name)
            if tli > backup_tli:
                continue
            # Only regular WAL segments are returned from here on
            if not xlog.is_wal_file(wal_info.name):
                continue
            # Stop at the end of the next backup, if there is one
            if next_end and wal_info.name > next_end:
                break
            yield wal_info
def test_history_file(self):
    """Verify recognition (and rejection) of history file names."""
    # Genuine history files, bare and with a leading path component
    for good in ('00000002.history', 'test/00000002.history'):
        assert xlog.is_history_file(good)
    # Names that must NOT be classified as history files
    rejected = (
        '000000000000000200000001',
        '00000001000000000000000A.00000020.backup',
        '00000000000000000000000',
        '0000000000000000000000000',
        '000000000000X00000000000',
        '00000001000000000000000A.backup',
        '00000001000000000000000A.history',
    )
    for bad in rejected:
        assert not xlog.is_history_file(bad)
    # A mangled backup label is not any kind of xlog file at all
    assert not xlog.is_any_xlog_file(
        'test.00000001000000000000000A.00000020.backup')
def remove_wal_before_backup(self, backup_info, timelines_to_protect=None):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files
    If timelines_to_protect list is passed, never remove a wal in one of
    these timelines.

    :param BackupInfo|None backup_info: the backup information structure
    :param set timelines_to_protect: optional list of timelines
        to protect
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb("r+") as fxlogdb:
        xlogdb_dir = os.path.dirname(fxlogdb.name)
        # Stage surviving entries in an anonymous temporary file in the
        # same directory, then copy them back over the open xlogdb
        with tempfile.TemporaryFile(mode="w+", dir=xlogdb_dir) as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Malformed entries are reported but never deleted
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid WAL segment name %r\n"
                        'HINT: Please run "barman rebuild-xlogdb %s" '
                        "to solve this issue",
                        wal_info.name,
                        self.config.name,
                    )
                    continue
                # Keeps the WAL segment if it is a history file
                keep = xlog.is_history_file(wal_info.name)
                # Keeps the WAL segment if its timeline is in
                # `timelines_to_protect`
                if timelines_to_protect:
                    tli, _, _ = xlog.decode_segment_name(wal_info.name)
                    keep |= tli in timelines_to_protect
                # Keeps the WAL segment if it is a newer
                # than the given backup (the first available)
                if backup_info and backup_info.begin_wal is not None:
                    keep |= wal_info.name >= backup_info.begin_wal
                # If the file has to be kept write it in the new xlogdb
                # otherwise delete it and record it in the removed list
                if keep:
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Rewrite the xlogdb in place: rewind both files, copy the
            # kept lines back and drop any leftover tail
            fxlogdb_new.flush()
            fxlogdb_new.seek(0)
            fxlogdb.seek(0)
            shutil.copyfileobj(fxlogdb_new, fxlogdb)
            fxlogdb.truncate()
    return removed
def main(args=None):
    """
    The main script entry point

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)

    # Read wal_path from environment if we're a hook script
    if __is_hook_script():
        if "BARMAN_FILE" not in os.environ:
            raise BarmanException(
                "Expected environment variable BARMAN_FILE not set")
        config.wal_path = os.getenv("BARMAN_FILE")
    else:
        # Outside hook mode wal_path must come from the CLI
        if config.wal_path is None:
            raise BarmanException(
                "the following arguments are required: wal_path")

    # Validate the WAL file name before uploading it
    if not is_any_xlog_file(config.wal_path):
        logging.error("%s is an invalid name for a WAL file" % config.wal_path)
        raise CLIErrorExit()

    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            uploader = CloudWalUploader(
                cloud_interface=cloud_interface,
                server_name=config.server_name,
                compression=config.compression,
            )
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)
            # TODO: Should the setup be optional?
            cloud_interface.setup_bucket()
            # History files may carry dedicated tags when configured
            upload_kwargs = {}
            if is_history_file(config.wal_path):
                upload_kwargs["override_tags"] = config.history_tags
            uploader.upload_wal(config.wal_path, **upload_kwargs)
    except Exception as exc:
        # Top-level boundary: log and convert to a generic error exit
        logging.error("Barman cloud WAL archiver exception: %s",
                      force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def remove_wal_before_backup(self, backup_info, timelines_to_protect=None):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files
    If timelines_to_protect list is passed, never remove a wal in one of
    these timelines.

    :param BackupInfo|None backup_info: the backup information structure
    :param set timelines_to_protect: optional list of timelines
        to protect
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite surviving entries into a ".new" file, then swap it in
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Malformed entries are reported but never deleted
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid xlog segment name %r\n"
                        "HINT: Please run \"barman rebuild-xlogdb %s\" "
                        "to solve this issue",
                        wal_info.name, self.config.name)
                    continue
                # Keeps the WAL segment if it is a history file
                keep = xlog.is_history_file(wal_info.name)
                # Keeps the WAL segment if its timeline is in
                # `timelines_to_protect`
                if timelines_to_protect:
                    tli, _, _ = xlog.decode_segment_name(wal_info.name)
                    keep |= tli in timelines_to_protect
                # Keeps the WAL segment if it is newer than the given
                # backup (the first available).
                # BUG FIX: guard against begin_wal being None, which
                # previously made the ">=" comparison raise TypeError
                # on Python 3.
                if backup_info and backup_info.begin_wal is not None:
                    keep |= wal_info.name >= backup_info.begin_wal
                # If the file has to be kept write it in the new xlogdb
                # otherwise delete it and record it in the removed list
                if keep:
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Make sure the new content reaches disk before the rename
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        # Persist the rename itself (directory entry) on disk
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
def remove_wal_before_backup(self, backup_info):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files

    :param BackupInfo|None backup_info: the backup information structure
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite surviving entries into a ".new" file, then swap it in
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Malformed entries are reported but never deleted
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid xlog segment name %r\n"
                        "HINT: Please run \"barman rebuild-xlogdb %s\" "
                        "to solve this issue",
                        wal_info.name, self.config.name)
                    continue
                # Keeps the WAL segment if it is a history file or later
                # than the given backup (the first available).
                # BUG FIX: guard against begin_wal being None, which
                # previously made the ">=" comparison raise TypeError
                # on Python 3.
                if (xlog.is_history_file(wal_info.name) or
                        (backup_info and
                         backup_info.begin_wal is not None and
                         wal_info.name >= backup_info.begin_wal)):
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                    continue
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Make sure the new content reaches disk before the rename
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        # Persist the rename itself (directory entry) on disk
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
def test_history_file(self):
    """Verify recognition (and rejection) of history file names."""
    # Genuine history files, bare and with a leading path component
    for good in ('00000002.history', 'test/00000002.history'):
        assert xlog.is_history_file(good)
    # Names that must NOT be classified as history files
    rejected = (
        '000000000000000200000001',
        '00000001000000000000000A.00000020.backup',
        '00000000000000000000000',
        '0000000000000000000000000',
        '000000000000X00000000000',
        '00000001000000000000000A.backup',
        '00000001000000000000000A.history',
        '00000001000000000000000A.partial',
        '00000001.partial',
    )
    for bad in rejected:
        assert not xlog.is_history_file(bad)
    # A mangled backup label is not any kind of xlog file at all
    assert not xlog.is_any_xlog_file(
        'test.00000001000000000000000A.00000020.backup')
def rebuild_xlogdb(self):
    """
    Rebuild the whole xlog database guessing it from the archive content.
    """
    from os.path import isdir, join

    output.info("Rebuilding xlogdb for server %s", self.config.name)
    root = self.config.wals_directory
    default_compression = self.config.compression
    # Counters reported to the user at the end of the scan
    wal_count = label_count = history_count = 0
    # lock the xlogdb as we are about replacing it completely
    with self.server.xlogdb('w') as fxlogdb:
        # Build a ".new" file first, then atomically swap it in
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for name in sorted(os.listdir(root)):
                # ignore the xlogdb and its lockfile
                if name.startswith(self.server.XLOG_DB):
                    continue
                fullname = join(root, name)
                if isdir(fullname):
                    # all relevant files are in subdirectories
                    hash_dir = fullname
                    for wal_name in sorted(os.listdir(hash_dir)):
                        fullname = join(hash_dir, wal_name)
                        if isdir(fullname):
                            _logger.warning(
                                'unexpected directory '
                                'rebuilding the wal database: %s',
                                fullname)
                        else:
                            if xlog.is_wal_file(fullname):
                                wal_count += 1
                            elif xlog.is_backup_file(fullname):
                                label_count += 1
                            else:
                                # Unknown files are skipped, not indexed
                                _logger.warning(
                                    'unexpected file '
                                    'rebuilding the wal database: %s',
                                    fullname)
                                continue
                            wal_info = WalFileInfo.from_file(
                                fullname,
                                default_compression=default_compression)
                            fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    # only history files are here
                    if xlog.is_history_file(fullname):
                        history_count += 1
                        wal_info = WalFileInfo.from_file(
                            fullname,
                            default_compression=default_compression)
                        fxlogdb_new.write(wal_info.to_xlogdb_line())
                    else:
                        _logger.warning(
                            'unexpected file '
                            'rebuilding the wal database: %s',
                            fullname)
            # Make sure the new content reaches disk before the rename
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        # Persist the rename itself (directory entry) on disk
        fsync_dir(os.path.dirname(fxlogdb.name))
    output.info('Done rebuilding xlogdb for server %s '
                '(history: %s, backup_labels: %s, wal_file: %s)',
                self.config.name, history_count, label_count, wal_count)
def archive_wal(self, verbose=True):
    """
    Executes WAL maintenance operations, such as archiving
    and compression

    If verbose is set to False, outputs something only if there is
    at least one file

    :param bool verbose: report even if no actions
    """
    found = False
    compressor = self.compression_manager.get_compressor()
    with self.server.xlogdb('a') as fxlogdb:
        if verbose:
            output.info("Processing xlog segments for %s",
                        self.config.name,
                        log=False)
        # Get the first available backup
        first_backup_id = self.get_first_backup(BackupInfo.STATUS_NOT_EMPTY)
        first_backup = self.server.get_backup(first_backup_id)
        for filename in sorted(glob(
                os.path.join(self.config.incoming_wals_directory, '*'))):
            if not found and not verbose:
                output.info("Processing xlog segments for %s",
                            self.config.name,
                            log=False)
            found = True
            # Create WAL Info object
            wal_info = WalFileInfo.from_file(filename, compression=None)
            # If there are no available backups ...
            if first_backup is None:
                # ... delete xlog segments only for exclusive backups
                if BackupOptions.CONCURRENT_BACKUP \
                        not in self.config.backup_options:
                    # Skipping history files
                    if not xlog.is_history_file(filename):
                        output.info("\tNo base backup available."
                                    " Trashing file %s"
                                    " from server %s",
                                    wal_info.name, self.config.name)
                        os.unlink(filename)
                        continue
            # ... otherwise
            else:
                # ... delete xlog segments older than the first backup
                if wal_info.name < first_backup.begin_wal:
                    # Skipping history files
                    if not xlog.is_history_file(filename):
                        output.info("\tOlder than first backup."
                                    " Trashing file %s"
                                    " from server %s",
                                    wal_info.name, self.config.name)
                        os.unlink(filename)
                        continue
            # Report to the user the WAL file we are archiving
            output.info("\t%s", os.path.basename(filename), log=False)
            _logger.info("Archiving %s/%s", self.config.name,
                         os.path.basename(filename))
            # Archive the WAL file
            try:
                self.cron_wal_archival(compressor, wal_info)
            except AbortedRetryHookScript as e:
                # BUG FIX: os.path.basename() was called with no
                # argument here, raising TypeError instead of logging
                # the name of the aborted file.
                _logger.warning("Archiving of %s/%s aborted by "
                                "pre_archive_retry_script."
                                "Reason: %s" % (self.config.name,
                                                os.path.basename(filename),
                                                e))
                return
            # Updates the information of the WAL archive with
            # the latest segments
            fxlogdb.write(wal_info.to_xlogdb_line())
            # flush and fsync for every line
            fxlogdb.flush()
            os.fsync(fxlogdb.fileno())
    if not found and verbose:
        output.info("\tno file found", log=False)
def _remove_wals_for_backup(
    cloud_interface,
    catalog,
    deleted_backup,
    dry_run,
    skip_wal_cleanup_if_standalone=True,
):
    """
    Delete from the cloud the WALs made obsolete by removing a backup.

    :param cloud_interface: cloud interface used to delete objects
    :param catalog: catalog providing the backup list, WAL paths and
        the local WAL cache
    :param deleted_backup: the backup that has just been deleted
    :param bool dry_run: when True, only print what would be deleted
    :param bool skip_wal_cleanup_if_standalone: forwarded to
        BackupManager.should_remove_wals
    """
    # An implementation of BackupManager.remove_wal_before_backup which
    # does not use xlogdb, since xlogdb is not available to barman-cloud
    should_remove_wals, wal_ranges_to_protect = BackupManager.should_remove_wals(
        deleted_backup,
        catalog.get_backup_list(),
        keep_manager=catalog,
        skip_wal_cleanup_if_standalone=skip_wal_cleanup_if_standalone,
    )
    next_backup = BackupManager.find_next_backup_in(
        catalog.get_backup_list(), deleted_backup.backup_id
    )
    wals_to_delete = {}
    if should_remove_wals:
        # There is no previous backup or all previous backups are archival
        # standalone backups, so we can remove unused WALs (those WALs not
        # required by standalone archival backups).
        # If there is a next backup then all unused WALs up to the begin_wal
        # of the next backup can be removed.
        # If there is no next backup then there are no remaining backups,
        # because we must assume non-exclusive backups are taken, we can only
        # safely delete unused WALs up to begin_wal of the deleted backup.
        # See comments in barman.backup.BackupManager.delete_backup.
        if next_backup:
            remove_until = next_backup
        else:
            remove_until = deleted_backup
        # A WAL is only a candidate for deletion if it is on the same
        # timeline so we use BackupManager to get a set of all other
        # timelines with backups so that we can preserve all WALs on
        # other timelines.
        timelines_to_protect = BackupManager.get_timelines_to_protect(
            remove_until=remove_until,
            deleted_backup=deleted_backup,
            available_backups=catalog.get_backup_list(),
        )
        try:
            wal_paths = catalog.get_wal_paths()
        except Exception as exc:
            # Listing failure: abort cleanup rather than guess
            logging.error(
                "Cannot clean up WALs for backup %s because an error occurred listing WALs: %s",
                deleted_backup.backup_id,
                force_str(exc),
            )
            return
        for wal_name, wal in wal_paths.items():
            # History files are never candidates for deletion
            if xlog.is_history_file(wal_name):
                continue
            if timelines_to_protect:
                tli, _, _ = xlog.decode_segment_name(wal_name)
                if tli in timelines_to_protect:
                    continue
            # Check if the WAL is in a protected range, required by an
            # archival standalone backup - so do not delete it
            if xlog.is_backup_file(wal_name):
                # If we have a backup file, truncate the name for the
                # range check
                range_check_wal_name = wal_name[:24]
            else:
                range_check_wal_name = wal_name
            if any(
                range_check_wal_name >= begin_wal
                and range_check_wal_name <= end_wal
                for begin_wal, end_wal in wal_ranges_to_protect
            ):
                continue
            if wal_name < remove_until.begin_wal:
                wals_to_delete[wal_name] = wal
    # Explicitly sort because dicts are not ordered in python < 3.6
    wal_paths_to_delete = sorted(wals_to_delete.values())
    if len(wal_paths_to_delete) > 0:
        if not dry_run:
            try:
                cloud_interface.delete_objects(wal_paths_to_delete)
            except Exception as exc:
                logging.error(
                    "Could not delete the following WALs for backup %s: %s, Reason: %s",
                    deleted_backup.backup_id,
                    wal_paths_to_delete,
                    force_str(exc),
                )
                # Return early so that we leave the WALs in the local
                # cache so they can be cleaned up should there be a
                # subsequent backup deletion.
                return
        else:
            print(
                "Skipping deletion of objects %s due to --dry-run option"
                % wal_paths_to_delete
            )
        # Only drop cache entries once deletion (or dry-run) succeeded
        for wal_name in wals_to_delete.keys():
            catalog.remove_wal_from_cache(wal_name)
def rebuild_xlogdb(self):
    """
    Rebuild the whole xlog database guessing it from the archive content.
    """
    from os.path import isdir, join

    output.info("Rebuilding xlogdb for server %s", self.config.name)
    root = self.config.wals_directory
    comp_manager = self.compression_manager
    # Counters reported to the user at the end of the scan
    wal_count = label_count = history_count = 0
    # lock the xlogdb as we are about replacing it completely
    with self.server.xlogdb('w') as fxlogdb:
        # Build a ".new" file first, then atomically swap it in
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for name in sorted(os.listdir(root)):
                # ignore the xlogdb and its lockfile
                if name.startswith(self.server.XLOG_DB):
                    continue
                fullname = join(root, name)
                if isdir(fullname):
                    # all relevant files are in subdirectories
                    hash_dir = fullname
                    for wal_name in sorted(os.listdir(hash_dir)):
                        fullname = join(hash_dir, wal_name)
                        if isdir(fullname):
                            _logger.warning(
                                'unexpected directory '
                                'rebuilding the wal database: %s',
                                fullname)
                        else:
                            if xlog.is_wal_file(fullname):
                                wal_count += 1
                            elif xlog.is_backup_file(fullname):
                                label_count += 1
                            elif fullname.endswith('.tmp'):
                                # In-flight uploads are skipped silently
                                # (apart from the warning)
                                _logger.warning(
                                    'temporary file found '
                                    'rebuilding the wal database: %s',
                                    fullname)
                                continue
                            else:
                                _logger.warning(
                                    'unexpected file '
                                    'rebuilding the wal database: %s',
                                    fullname)
                                continue
                            wal_info = comp_manager.get_wal_file_info(
                                fullname)
                            fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    # only history files are here
                    if xlog.is_history_file(fullname):
                        history_count += 1
                        wal_info = comp_manager.get_wal_file_info(fullname)
                        fxlogdb_new.write(wal_info.to_xlogdb_line())
                    else:
                        _logger.warning(
                            'unexpected file '
                            'rebuilding the wal database: %s',
                            fullname)
            # Make sure the new content reaches disk before the rename
            os.fsync(fxlogdb_new.fileno())
        shutil.move(xlogdb_new, fxlogdb.name)
        # Persist the rename itself (directory entry) on disk
        fsync_dir(os.path.dirname(fxlogdb.name))
    output.info(
        'Done rebuilding xlogdb for server %s '
        '(history: %s, backup_labels: %s, wal_file: %s)',
        self.config.name, history_count, label_count, wal_count)
def test_history_file(self):
    """Verify recognition (and rejection) of history file names."""
    # Genuine history files, bare and with a leading path component
    for good in ("00000002.history", "test/00000002.history"):
        assert xlog.is_history_file(good)
    # Names that must NOT be classified as history files
    rejected = (
        "000000000000000200000001",
        "00000001000000000000000A.00000020.backup",
        "00000000000000000000000",
        "0000000000000000000000000",
        "000000000000X00000000000",
        "00000001000000000000000A.backup",
        "00000001000000000000000A.history",
        "00000001000000000000000A.partial",
        "00000001.partial",
    )
    for bad in rejected:
        assert not xlog.is_history_file(bad)
    # A mangled backup label is not any kind of xlog file at all
    assert not xlog.is_any_xlog_file(
        "test.00000001000000000000000A.00000020.backup"
    )
def archive(self, first_backup, fxlogdb, verbose=True):
    """
    Archive WAL files, discarding duplicates or those that are not valid.

    :param BackupInfo first_backup: BackupInfo of the oldest backup for the
        current server
    :param file fxlogdb: File object for xlogdb interactions
    :param boolean verbose: Flag for verbose output
    """
    compressor = self.backup_manager.compression_manager.get_compressor()
    # Timestamp used to build unique names inside the errors directory
    stamp = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
    found = False
    if verbose:
        output.info("Processing xlog segments from %s for %s",
                    self.name,
                    self.config.name,
                    log=False)
    batch = self.get_next_batch()
    for wal_info in batch:
        if not found and not verbose:
            output.info("Processing xlog segments from %s for %s",
                        self.name,
                        self.config.name,
                        log=False)
        found = True

        # Delete the xlog segment if no backup is present and
        # backup strategy is not concurrent and
        # the wal file is not a history file
        if (first_backup is None and
                BackupOptions.CONCURRENT_BACKUP not in
                self.config.backup_options and
                not xlog.is_history_file(wal_info.name)):
            output.info("\tNo base backup available. "
                        "Trashing file %s from server %s",
                        wal_info.name, self.config.name)
            os.unlink(wal_info.orig_filename)
            continue
        # ... otherwise move the wal file in the error directory
        # if not relevant according to the first backup present
        elif not self.is_wal_relevant(wal_info, first_backup):
            error_dst = os.path.join(
                self.config.errors_directory,
                "%s.%s.error" % (wal_info.name, stamp))
            shutil.move(wal_info.orig_filename, error_dst)
            continue

        # Report to the user the WAL file we are archiving
        output.info("\t%s", wal_info.name, log=False)
        _logger.info("Archiving %s/%s", self.config.name, wal_info.name)
        # Archive the WAL file
        try:
            self.archive_wal(compressor, wal_info)
        except MatchingDuplicateWalFile:
            # We already have this file. Simply unlink the file.
            os.unlink(wal_info.orig_filename)
            continue
        except DuplicateWalFile:
            output.info("\tError: %s is already present in server %s. "
                        "File moved to errors directory.",
                        wal_info.name,
                        self.config.name)
            error_dst = os.path.join(
                self.config.errors_directory,
                "%s.%s.duplicate" % (wal_info.name, stamp))
            # TODO: cover corner case of duplication (unlikely,
            # but theoretically possible)
            shutil.move(wal_info.orig_filename, error_dst)
            continue
        except AbortedRetryHookScript as e:
            _logger.warning("Archiving of %s/%s aborted by "
                            "pre_archive_retry_script."
                            "Reason: %s" % (self.config.name,
                                            wal_info.name,
                                            e))
            return
        # Updates the information of the WAL archive with
        # the latest segments
        fxlogdb.write(wal_info.to_xlogdb_line())
        # flush and fsync for every line
        fxlogdb.flush()
        os.fsync(fxlogdb.fileno())
    if not found and verbose:
        output.info("\tno file found", log=False)
    # Any unrecognised object found while building the batch is moved
    # to the errors directory with a ".unknown" suffix
    if batch.errors:
        output.info("Some unknown objects have been found while "
                    "processing xlog segments for %s. "
                    "Objects moved to errors directory:",
                    self.config.name,
                    log=False)
        for error in batch.errors:
            output.info("\t%s", error)
            error_dst = os.path.join(
                self.config.errors_directory,
                "%s.%s.unknown" % (os.path.basename(error), stamp))
            shutil.move(error, error_dst)
def rebuild_xlogdb(self):
    """
    Rebuild the whole xlog database guessing it from the archive content.
    """
    from os.path import isdir, join

    output.info("Rebuilding xlogdb for server %s", self.config.name)
    root = self.config.wals_directory
    comp_manager = self.compression_manager
    # Counters reported to the user at the end of the scan
    wal_count = label_count = history_count = 0
    # lock the xlogdb as we are about replacing it completely
    with self.server.xlogdb("w") as fxlogdb:
        xlogdb_dir = os.path.dirname(fxlogdb.name)
        # Stage the rebuilt database in an anonymous temporary file in
        # the same directory, then copy it back over the open xlogdb
        with tempfile.TemporaryFile(mode="w+", dir=xlogdb_dir) as fxlogdb_new:
            for name in sorted(os.listdir(root)):
                # ignore the xlogdb and its lockfile
                if name.startswith(self.server.XLOG_DB):
                    continue
                fullname = join(root, name)
                if isdir(fullname):
                    # all relevant files are in subdirectories
                    hash_dir = fullname
                    for wal_name in sorted(os.listdir(hash_dir)):
                        fullname = join(hash_dir, wal_name)
                        if isdir(fullname):
                            _logger.warning(
                                "unexpected directory "
                                "rebuilding the wal database: %s",
                                fullname,
                            )
                        else:
                            if xlog.is_wal_file(fullname):
                                wal_count += 1
                            elif xlog.is_backup_file(fullname):
                                label_count += 1
                            elif fullname.endswith(".tmp"):
                                # In-flight uploads are skipped silently
                                # (apart from the warning)
                                _logger.warning(
                                    "temporary file found "
                                    "rebuilding the wal database: %s",
                                    fullname,
                                )
                                continue
                            else:
                                _logger.warning(
                                    "unexpected file "
                                    "rebuilding the wal database: %s",
                                    fullname,
                                )
                                continue
                            wal_info = comp_manager.get_wal_file_info(
                                fullname)
                            fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    # only history files are here
                    if xlog.is_history_file(fullname):
                        history_count += 1
                        wal_info = comp_manager.get_wal_file_info(fullname)
                        fxlogdb_new.write(wal_info.to_xlogdb_line())
                    else:
                        _logger.warning(
                            "unexpected file rebuilding the wal database: %s",
                            fullname,
                        )
            # Rewrite the xlogdb in place: rewind both files, copy the
            # rebuilt content back and drop any leftover tail
            fxlogdb_new.flush()
            fxlogdb_new.seek(0)
            fxlogdb.seek(0)
            shutil.copyfileobj(fxlogdb_new, fxlogdb)
            fxlogdb.truncate()
    output.info(
        "Done rebuilding xlogdb for server %s "
        "(history: %s, backup_labels: %s, wal_file: %s)",
        self.config.name,
        history_count,
        label_count,
        wal_count,
    )