def recover_xlog_copy(self, decompressor, xlogs, wal_dest, remote_command=None):
    '''
    Restore WAL segments

    :param decompressor: the decompressor for the file (if any)
    :param xlogs: the xlog dictionary to recover
    :param wal_dest: the destination directory for xlog recover
    :param remote_command: default None. The remote command to recover
                           the xlog, in case of remote backup.
    '''
    rsync = RsyncPgData(ssh=remote_command)
    if remote_command:
        # If remote recovery tell rsync to copy them remotely
        # (':' prefix marks the destination as remote for rsync)
        wal_dest = ':%s' % wal_dest
    else:
        # we will not use rsync: destination directory must exist
        if not os.path.exists(wal_dest):
            os.makedirs(wal_dest)
    # A remote recovery of compressed WALs needs a local spool directory
    # where segments are decompressed before being shipped with rsync
    xlog_spool = None
    if decompressor and remote_command:
        xlog_spool = tempfile.mkdtemp(prefix='barman_xlog-')
    try:
        for prefix in xlogs:
            source_dir = os.path.join(self.config.wals_directory, prefix)
            if decompressor:
                if remote_command:
                    # Decompress the batch to the spool, ship it, then
                    # remove the spooled copies to bound disk usage
                    for segment in xlogs[prefix]:
                        decompressor(os.path.join(source_dir, segment),
                                     os.path.join(xlog_spool, segment))
                    rsync.from_file_list(xlogs[prefix], xlog_spool, wal_dest)
                    for segment in xlogs[prefix]:
                        os.unlink(os.path.join(xlog_spool, segment))
                else:
                    # decompress directly to the right place
                    for segment in xlogs[prefix]:
                        decompressor(os.path.join(source_dir, segment),
                                     os.path.join(wal_dest, segment))
            else:
                # No compression: rsync straight from the WAL archive
                rsync.from_file_list(
                    xlogs[prefix],
                    "%s/" % os.path.join(self.config.wals_directory, prefix),
                    wal_dest)
    finally:
        # Always remove the spool directory, even when decompression or
        # the transfer fails (the original leaked it on exception)
        if xlog_spool is not None:
            shutil.rmtree(xlog_spool)
def recover_xlog_copy(self, decompressor, xlogs, wal_dest, remote_command=None):
    '''
    Restore WAL segments

    :param decompressor: the decompressor for the file (if any)
    :param xlogs: the xlog dictionary to recover
    :param wal_dest: the destination directory for xlog recover
    :param remote_command: default None. The remote command to recover
                           the xlog, in case of remote backup.
    '''
    rsync = RsyncPgData(ssh=remote_command)
    if remote_command:
        # If remote recovery tell rsync to copy them remotely
        wal_dest = ':%s' % wal_dest
    elif not os.path.exists(wal_dest):
        # rsync will not be used locally: the target dir must exist
        os.makedirs(wal_dest)
    spool_dir = None
    if decompressor and remote_command:
        spool_dir = tempfile.mkdtemp(prefix='barman_xlog-')
    for hashdir in xlogs:
        batch = xlogs[hashdir]
        src = os.path.join(self.config.wals_directory, hashdir)
        if not decompressor:
            # Plain copy straight from the WAL archive directory
            rsync.from_file_list(
                batch,
                "%s/" % os.path.join(self.config.wals_directory, hashdir),
                wal_dest)
            continue
        if remote_command:
            # Decompress into the spool, ship, then drop the spooled files
            for name in batch:
                decompressor(os.path.join(src, name),
                             os.path.join(spool_dir, name))
            rsync.from_file_list(batch, spool_dir, wal_dest)
            for name in batch:
                os.unlink(os.path.join(spool_dir, name))
        else:
            # decompress directly to the right place
            for name in batch:
                decompressor(os.path.join(src, name),
                             os.path.join(wal_dest, name))
    if decompressor and remote_command:
        shutil.rmtree(spool_dir)
def xlog_copy(self, required_xlog_files, wal_dest, remote_command):
    """
    Restore WAL segments

    :param required_xlog_files: list of all required WAL files
    :param wal_dest: the destination directory for xlog recover
    :param remote_command: default None. The remote command to recover
        the xlog, in case of remote backup.
    """
    # List of required WAL files partitioned by containing directory
    xlogs = collections.defaultdict(list)
    # add '/' suffix to ensure it is a directory
    wal_dest = '%s/' % wal_dest
    # Map of every compressor used with any WAL file in the archive,
    # to be used during this recovery
    compressors = {}
    compression_manager = self.backup_manager.compression_manager
    # Fill xlogs and compressors maps from required_xlog_files
    for wal_info in required_xlog_files:
        hashdir = xlog.hash_dir(wal_info.name)
        xlogs[hashdir].append(wal_info)
        # If a compressor is required, make sure it exists in the cache
        if wal_info.compression is not None and \
                wal_info.compression not in compressors:
            compressors[wal_info.compression] = \
                compression_manager.get_compressor(
                    compression=wal_info.compression)

    rsync = RsyncPgData(
        path=self.server.path,
        ssh=remote_command,
        bwlimit=self.config.bandwidth_limit,
        network_compression=self.config.network_compression)
    # If compression is used and this is a remote recovery, we need a
    # temporary directory where to spool uncompressed files,
    # otherwise we either decompress every WAL file in the local
    # destination, or we ship the uncompressed file remotely
    if compressors:
        if remote_command:
            # Decompress to a temporary spool directory
            wal_decompression_dest = tempfile.mkdtemp(
                prefix='barman_xlog-')
        else:
            # Decompress directly to the destination directory
            wal_decompression_dest = wal_dest
        # Make sure wal_decompression_dest exists
        mkpath(wal_decompression_dest)
    else:
        # If no compression
        wal_decompression_dest = None
    if remote_command:
        # If remote recovery tell rsync to copy them remotely
        # add ':' prefix to mark it as remote
        wal_dest = ':%s' % wal_dest

    total_wals = sum(map(len, xlogs.values()))
    partial_count = 0
    # Run the copy loop inside try/finally so the temporary spool
    # directory is removed even if a transfer fails (the original code
    # leaked it when DataTransferFailure was raised)
    try:
        for prefix in sorted(xlogs):
            batch_len = len(xlogs[prefix])
            partial_count += batch_len
            source_dir = os.path.join(self.config.wals_directory, prefix)
            _logger.info(
                "Starting copy of %s WAL files %s/%s from %s to %s",
                batch_len, partial_count, total_wals,
                xlogs[prefix][0], xlogs[prefix][-1])
            # If at least one compressed file has been found, activate
            # compression check and decompression for each WAL file
            if compressors:
                for segment in xlogs[prefix]:
                    dst_file = os.path.join(wal_decompression_dest,
                                            segment.name)
                    if segment.compression is not None:
                        compressors[segment.compression].decompress(
                            os.path.join(source_dir, segment.name),
                            dst_file)
                    else:
                        # Uncompressed segment: plain copy preserving
                        # metadata
                        shutil.copy2(
                            os.path.join(source_dir, segment.name),
                            dst_file)
                if remote_command:
                    try:
                        # Transfer the WAL files
                        rsync.from_file_list(
                            list(segment.name
                                 for segment in xlogs[prefix]),
                            wal_decompression_dest, wal_dest)
                    except CommandFailedException as e:
                        msg = ("data transfer failure while copying WAL "
                               "files to directory '%s'") % (wal_dest[1:],)
                        raise DataTransferFailure.from_rsync_error(e, msg)
                    # Cleanup files after the transfer
                    for segment in xlogs[prefix]:
                        file_name = os.path.join(wal_decompression_dest,
                                                 segment.name)
                        try:
                            os.unlink(file_name)
                        except OSError as e:
                            output.warning(
                                "Error removing temporary file '%s': %s",
                                file_name, e)
            else:
                try:
                    rsync.from_file_list(
                        list(segment.name for segment in xlogs[prefix]),
                        "%s/" % os.path.join(self.config.wals_directory,
                                             prefix),
                        wal_dest)
                except CommandFailedException as e:
                    msg = "data transfer failure while copying WAL files " \
                          "to directory '%s'" % (wal_dest[1:],)
                    raise DataTransferFailure.from_rsync_error(e, msg)

        _logger.info("Finished copying %s WAL files.", total_wals)
    finally:
        # Remove local decompression target directory if different from
        # the destination directory (it happens when compression is in
        # use during a remote recovery)
        if wal_decompression_dest and wal_decompression_dest != wal_dest:
            shutil.rmtree(wal_decompression_dest)
def _generate_recovery_conf(self, recovery_info, backup_info, dest,
                            exclusive, remote_command, target_name,
                            target_time, target_tli, target_xid):
    """
    Generate a recovery.conf file for PITR containing
    all the required configurations

    :param dict recovery_info: Dictionary containing all the recovery
        parameters
    :param barman.infofile.BackupInfo backup_info: representation of a
        backup
    :param str dest: destination directory of the recovery
    :param boolean exclusive: exclusive backup or concurrent
    :param str remote_command: ssh command for remote connection
    :param str target_name: recovery target name for PITR
    :param str target_time: recovery target time for PITR
    :param str target_tli: recovery target timeline for PITR
    :param str target_xid: recovery target transaction id for PITR
    """
    # For a remote recovery the file is generated inside the local
    # temporary directory and shipped to `dest` with rsync afterwards
    if remote_command:
        recovery_conf_path = os.path.join(
            recovery_info['tempdir'], 'recovery.conf')
    else:
        recovery_conf_path = os.path.join(dest, 'recovery.conf')
    # Use a context manager so the file is closed even if an error
    # occurs while writing (the original leaked the handle on failure)
    with open(recovery_conf_path, 'w') as recovery:
        # If GET_WAL has been set, use the get-wal command to retrieve
        # the required wal files. Otherwise use the unix command "cp" to
        # copy them from the barman_xlog directory
        if recovery_info['get_wal']:
            # We need to create the right restore command.
            # If we are doing a remote recovery,
            # the barman-cli package is REQUIRED on the server that is
            # hosting the PostgreSQL server.
            # We use the machine FQDN and the barman_user
            # setting to call the barman-wal-restore correctly.
            # If local recovery, we use barman directly, assuming
            # the postgres process will be executed with the barman user.
            # It MUST be reviewed by the user in any case.
            if remote_command:
                fqdn = socket.getfqdn()
                print("# The 'barman-wal-restore' command "
                      "is provided in the 'barman-cli' package",
                      file=recovery)
                print("restore_command = 'barman-wal-restore -U %s "
                      "%s %s %%f %%p'" % (self.config.config.user,
                                          fqdn, self.config.name),
                      file=recovery)
            else:
                print("# The 'barman get-wal' command "
                      "must run as '%s' user" % self.config.config.user,
                      file=recovery)
                print("restore_command = 'sudo -u %s "
                      "barman get-wal %s %%f > %%p'" % (
                          self.config.config.user, self.config.name),
                      file=recovery)
            recovery_info['results']['get_wal'] = True
        else:
            print("restore_command = 'cp barman_xlog/%f %p'",
                  file=recovery)
        # recovery_end_command is only available from PostgreSQL 8.4
        if backup_info.version >= 80400 and \
                not recovery_info['get_wal']:
            print("recovery_end_command = 'rm -fr barman_xlog'",
                  file=recovery)
        if target_time:
            print("recovery_target_time = '%s'" % target_time,
                  file=recovery)
        if target_tli:
            print("recovery_target_timeline = %s" % target_tli,
                  file=recovery)
        if target_xid:
            print("recovery_target_xid = '%s'" % target_xid,
                  file=recovery)
        if target_name:
            print("recovery_target_name = '%s'" % target_name,
                  file=recovery)
        if (target_xid or target_time) and exclusive:
            print("recovery_target_inclusive = '%s'" % (not exclusive),
                  file=recovery)
    if remote_command:
        plain_rsync = RsyncPgData(
            path=self.server.path,
            ssh=remote_command,
            bwlimit=self.config.bandwidth_limit,
            network_compression=self.config.network_compression)
        try:
            plain_rsync.from_file_list(['recovery.conf'],
                                       recovery_info['tempdir'],
                                       ':%s' % dest)
        except CommandFailedException as e:
            output.error('remote copy of recovery.conf failed: %s', e)
            output.close_and_exit()
def _xlog_copy(self, required_xlog_files, wal_dest, remote_command):
    """
    Restore WAL segments

    :param required_xlog_files: list of all required WAL files
    :param wal_dest: the destination directory for xlog recover
    :param remote_command: default None. The remote command to recover
        the xlog, in case of remote backup.
    """
    # List of required WAL files partitioned by containing directory
    xlogs = collections.defaultdict(list)
    # add '/' suffix to ensure it is a directory
    wal_dest = '%s/' % wal_dest
    # Map of every compressor used with any WAL file in the archive,
    # to be used during this recovery
    compressors = {}
    compression_manager = self.backup_manager.compression_manager
    # Fill xlogs and compressors maps from required_xlog_files
    for wal_info in required_xlog_files:
        hashdir = xlog.hash_dir(wal_info.name)
        xlogs[hashdir].append(wal_info)
        # If a compressor is required, make sure it exists in the cache
        if wal_info.compression is not None and \
                wal_info.compression not in compressors:
            compressors[wal_info.compression] = \
                compression_manager.get_compressor(
                    compression=wal_info.compression)

    rsync = RsyncPgData(
        path=self.server.path,
        ssh=remote_command,
        bwlimit=self.config.bandwidth_limit,
        network_compression=self.config.network_compression)
    # If compression is used and this is a remote recovery, we need a
    # temporary directory where to spool uncompressed files,
    # otherwise we either decompress every WAL file in the local
    # destination, or we ship the uncompressed file remotely
    if compressors:
        if remote_command:
            # Decompress to a temporary spool directory
            wal_decompression_dest = tempfile.mkdtemp(
                prefix='barman_xlog-')
        else:
            # Decompress directly to the destination directory
            wal_decompression_dest = wal_dest
        # Make sure wal_decompression_dest exists
        mkpath(wal_decompression_dest)
    else:
        # If no compression
        wal_decompression_dest = None
    if remote_command:
        # If remote recovery tell rsync to copy them remotely
        # add ':' prefix to mark it as remote
        wal_dest = ':%s' % wal_dest

    total_wals = sum(map(len, xlogs.values()))
    partial_count = 0
    # Run the copy loop inside try/finally so the temporary spool
    # directory is removed even if a transfer fails (the original code
    # leaked it when DataTransferFailure was raised)
    try:
        for prefix in sorted(xlogs):
            batch_len = len(xlogs[prefix])
            partial_count += batch_len
            source_dir = os.path.join(self.config.wals_directory, prefix)
            _logger.info("Starting copy of %s WAL files %s/%s "
                         "from %s to %s",
                         batch_len, partial_count, total_wals,
                         xlogs[prefix][0], xlogs[prefix][-1])
            # If at least one compressed file has been found, activate
            # compression check and decompression for each WAL file
            if compressors:
                for segment in xlogs[prefix]:
                    dst_file = os.path.join(wal_decompression_dest,
                                            segment.name)
                    if segment.compression is not None:
                        compressors[segment.compression].decompress(
                            os.path.join(source_dir, segment.name),
                            dst_file)
                    else:
                        # Uncompressed segment: plain copy preserving
                        # metadata
                        shutil.copy2(
                            os.path.join(source_dir, segment.name),
                            dst_file)
                if remote_command:
                    try:
                        # Transfer the WAL files
                        rsync.from_file_list(
                            list(segment.name
                                 for segment in xlogs[prefix]),
                            wal_decompression_dest, wal_dest)
                    except CommandFailedException as e:
                        msg = ("data transfer failure while copying WAL "
                               "files to directory '%s'") % (wal_dest[1:], )
                        raise DataTransferFailure.from_command_error(
                            'rsync', e, msg)
                    # Cleanup files after the transfer
                    for segment in xlogs[prefix]:
                        file_name = os.path.join(wal_decompression_dest,
                                                 segment.name)
                        try:
                            os.unlink(file_name)
                        except OSError as e:
                            output.warning(
                                "Error removing temporary file '%s': %s",
                                file_name, e)
            else:
                try:
                    rsync.from_file_list(
                        list(segment.name for segment in xlogs[prefix]),
                        "%s/" % os.path.join(self.config.wals_directory,
                                             prefix),
                        wal_dest)
                except CommandFailedException as e:
                    msg = "data transfer failure while copying WAL files " \
                          "to directory '%s'" % (wal_dest[1:],)
                    raise DataTransferFailure.from_command_error(
                        'rsync', e, msg)

        _logger.info("Finished copying %s WAL files.", total_wals)
    finally:
        # Remove local decompression target directory if different from
        # the destination directory (it happens when compression is in
        # use during a remote recovery)
        if wal_decompression_dest and wal_decompression_dest != wal_dest:
            shutil.rmtree(wal_decompression_dest)
def recover(self, backup, dest, tablespaces, target_tli, target_time,
            target_xid, exclusive, remote_command):
    '''
    Performs a recovery of a backup

    This is a generator: progress and status messages are yielded one
    line at a time so the caller can stream them to the user.

    :param backup: the backup to recover
    :param dest: the destination directory
    :param tablespaces: a dictionary of tablespaces
    :param target_tli: the target timeline
    :param target_time: the target time
    :param target_xid: the target xid
    :param exclusive: whether the recovery is exlusive or not
    :param remote_command: default None. The remote command to recover
                           the base backup, in case of remote backup.
    '''
    # Run maintenance first and relay its output to the caller
    for line in self.cron(False):
        yield line
    recovery_dest = 'local'
    if remote_command:
        recovery_dest = 'remote'
        # rsync is only needed (and only built) for remote recovery
        rsync = RsyncPgData(ssh=remote_command)
    msg = "Starting %s restore for server %s using backup %s " % (recovery_dest, self.config.name, backup.backup_id)
    yield msg
    _logger.info(msg)
    msg = "Destination directory: %s" % dest
    yield msg
    _logger.info(msg)
    if backup.tablespaces:
        if remote_command:
            # TODO: remote dir preparation
            msg = "Skipping remote directory preparation, you must have done it by yourself."
            yield msg
            _logger.warning(msg)
        else:
            # Prepare pg_tblspc and one symlink per tablespace,
            # honouring any relocation requested via `tablespaces`
            tblspc_dir = os.path.join(dest, 'pg_tblspc')
            if not os.path.exists(tblspc_dir):
                os.makedirs(tblspc_dir)
            for name, oid, location in backup.tablespaces:
                try:
                    # `tablespaces` may remap a tablespace to a new path
                    if name in tablespaces:
                        location = tablespaces[name]
                    tblspc_file = os.path.join(tblspc_dir, str(oid))
                    if os.path.exists(tblspc_file):
                        os.unlink(tblspc_file)
                    if os.path.exists(location) and not os.path.isdir(location):
                        os.unlink(location)
                    if not os.path.exists(location):
                        os.makedirs(location)
                    # test permissiones
                    # NOTE(review): `file()` is a Python 2 builtin
                    barman_write_check_file = os.path.join(location, '.barman_write_check')
                    file(barman_write_check_file, 'a').close()
                    os.unlink(barman_write_check_file)
                    os.symlink(location, tblspc_file)
                except:
                    msg = "ERROR: unable to prepare '%s' tablespace (destination '%s')" % (name, location)
                    _logger.critical(msg)
                    raise SystemExit(msg)
                yield "\t%s, %s, %s" % (oid, name, location)
    target_epoch = None
    if target_time:
        try:
            target_datetime = dateutil.parser.parse(target_time)
        except:
            msg = "ERROR: unable to parse the target time parameter %r" % target_time
            _logger.critical(msg)
            raise SystemExit(msg)
        # Convert to a Unix epoch, keeping sub-second precision
        target_epoch = time.mktime(target_datetime.timetuple()) + (target_datetime.microsecond / 1000000.)
    if target_time or target_xid or (target_tli and target_tli != backup.timeline):
        # Summarise the requested Point-In-Time-Recovery targets
        targets = {}
        if target_time:
            targets['time'] = str(target_datetime)
        if target_xid:
            targets['xid'] = str(target_xid)
        if target_tli and target_tli != backup.timeline:
            targets['timeline'] = str(target_tli)
        yield "Doing PITR. Recovery target %s" % \
            (", ".join(["%s: %r" % (k, v) for k, v in targets.items()]))
    # Copy the base backup
    msg = "Copying the base backup."
    yield msg
    _logger.info(msg)
    self.recover_basebackup_copy(backup, dest, remote_command)
    _logger.info("Base backup copied.")
    # Prepare WAL segments local directory
    msg = "Copying required wal segments."
    _logger.info(msg)
    yield msg
    # PITR keeps WALs apart in barman_xlog (removed at end of recovery
    # via recovery_end_command); otherwise they go straight into pg_xlog
    if target_time or target_xid or (target_tli and target_tli != backup.timeline):
        wal_dest = os.path.join(dest, 'barman_xlog')
    else:
        wal_dest = os.path.join(dest, 'pg_xlog')
    # Retrieve the list of required WAL segments according to recovery options
    xlogs = {}
    required_xlog_files = tuple(self.server.get_required_xlog_files(backup, target_tli, target_epoch, target_xid))
    # Group segment names by their containing hash directory
    for filename in required_xlog_files:
        hashdir = xlog.hash_dir(filename)
        if hashdir not in xlogs:
            xlogs[hashdir] = []
        xlogs[hashdir].append(filename)
    # Check decompression options
    decompressor = self.compression_manager.get_decompressor()
    # Restore WAL segments
    self.recover_xlog_copy(decompressor, xlogs, wal_dest, remote_command)
    _logger.info("Wal segmets copied.")
    # Generate recovery.conf file (only if needed by PITR)
    if target_time or target_xid or (target_tli and target_tli != backup.timeline):
        msg = "Generating recovery.conf"
        yield msg
        _logger.info(msg)
        # For remote recovery write locally to a temp dir, then rsync it
        if remote_command:
            tempdir = tempfile.mkdtemp(prefix='barman_recovery-')
            recovery = open(os.path.join(tempdir, 'recovery.conf'), 'w')
        else:
            recovery = open(os.path.join(dest, 'recovery.conf'), 'w')
        # NOTE(review): Python 2 print-chevron syntax below
        print >> recovery, "restore_command = 'cp barman_xlog/%f %p'"
        print >> recovery, "recovery_end_command = 'rm -fr barman_xlog'"
        if target_time:
            print >> recovery, "recovery_target_time = '%s'" % target_time
        if target_tli:
            print >> recovery, "recovery_target_timeline = %s" % target_tli
        if target_xid:
            print >> recovery, "recovery_target_xid = '%s'" % target_xid
        if exclusive:
            print >> recovery, "recovery_target_inclusive = '%s'" % (not exclusive)
        recovery.close()
        if remote_command:
            recovery = rsync.from_file_list(['recovery.conf'], tempdir, ':%s' % dest)
            shutil.rmtree(tempdir)
        _logger.info('recovery.conf generated')
    else:
        # avoid shipping of just recovered pg_xlog files
        if remote_command:
            status_dir = tempfile.mkdtemp(prefix='barman_xlog_status-')
        else:
            status_dir = os.path.join(wal_dest, 'archive_status')
            os.makedirs(status_dir)  # no need to check, it must not exist
        # Mark every restored segment as already archived (.done file)
        for filename in required_xlog_files:
            with file(os.path.join(status_dir, "%s.done" % filename), 'a') as f:
                f.write('')
        if remote_command:
            retval = rsync('%s/' % status_dir, ':%s' % os.path.join(wal_dest, 'archive_status'))
            if retval != 0:
                msg = "WARNING: unable to populate pg_xlog/archive_status dorectory"
                yield msg
                _logger.warning(msg)
            shutil.rmtree(status_dir)
    # Disable dangerous setting in the target data dir
    if remote_command:
        tempdir = tempfile.mkdtemp(prefix='barman_recovery-')
        pg_config = os.path.join(tempdir, 'postgresql.conf')
        shutil.copy2(os.path.join(backup.get_basebackup_directory(), 'pgdata', 'postgresql.conf'), pg_config)
    else:
        pg_config = os.path.join(dest, 'postgresql.conf')
    # Force archive_command to 'false', keeping the original as *.origin
    if self.pg_config_mangle(pg_config,
                             {'archive_command': 'false'},
                             "%s.origin" % pg_config):
        msg = "The archive_command was set to 'false' to prevent data losses."
        yield msg
        _logger.info(msg)
    # Find dangerous options in the configuration file (locations)
    clashes = self.pg_config_detect_possible_issues(pg_config)
    if remote_command:
        recovery = rsync.from_file_list(['postgresql.conf', 'postgresql.conf.origin'], tempdir, ':%s' % dest)
        shutil.rmtree(tempdir)
    yield ""
    yield "Your PostgreSQL server has been successfully prepared for recovery!"
    yield ""
    yield "Please review network and archive related settings in the PostgreSQL"
    yield "configuration file before starting the just recovered instance."
    yield ""
    if clashes:
        yield "WARNING: Before starting up the recovered PostgreSQL server,"
        yield "please review also the settings of the following configuration"
        yield "options as they might interfere with your current recovery attempt:"
        yield ""
        for name, value in sorted(clashes.items()):
            yield " %s = %s" % (name, value)
        yield ""
    _logger.info("Recovery completed successful.")
def recover(self, backup, dest, tablespaces, target_tli, target_time,
            target_xid, exclusive, remote_command):
    '''
    Performs a recovery of a backup

    This is a generator: progress and status messages are yielded one
    line at a time so the caller can stream them to the user.

    :param backup: the backup to recover
    :param dest: the destination directory
    :param tablespaces: a dictionary of tablespaces
    :param target_tli: the target timeline
    :param target_time: the target time
    :param target_xid: the target xid
    :param exclusive: whether the recovery is exlusive or not
    :param remote_command: default None. The remote command to recover
                           the base backup, in case of remote backup.
    '''
    # Run maintenance first and relay its output to the caller
    for line in self.cron(False):
        yield line
    recovery_dest = 'local'
    if remote_command:
        recovery_dest = 'remote'
        # rsync is only needed (and only built) for remote recovery
        rsync = RsyncPgData(ssh=remote_command)
    msg = "Starting %s restore for server %s using backup %s " % (
        recovery_dest, self.config.name, backup.backup_id)
    yield msg
    _logger.info(msg)
    msg = "Destination directory: %s" % dest
    yield msg
    _logger.info(msg)
    if backup.tablespaces:
        if remote_command:
            # TODO: remote dir preparation
            msg = "Skipping remote directory preparation, you must have done it by yourself."
            yield msg
            _logger.warning(msg)
        else:
            # Prepare pg_tblspc and one symlink per tablespace,
            # honouring any relocation requested via `tablespaces`
            tblspc_dir = os.path.join(dest, 'pg_tblspc')
            if not os.path.exists(tblspc_dir):
                os.makedirs(tblspc_dir)
            for name, oid, location in backup.tablespaces:
                try:
                    # `tablespaces` may remap a tablespace to a new path
                    if name in tablespaces:
                        location = tablespaces[name]
                    tblspc_file = os.path.join(tblspc_dir, str(oid))
                    if os.path.exists(tblspc_file):
                        os.unlink(tblspc_file)
                    if os.path.exists(
                            location) and not os.path.isdir(location):
                        os.unlink(location)
                    if not os.path.exists(location):
                        os.makedirs(location)
                    # test permissiones
                    barman_write_check_file = os.path.join(
                        location, '.barman_write_check')
                    open(barman_write_check_file, 'a').close()
                    os.unlink(barman_write_check_file)
                    os.symlink(location, tblspc_file)
                # Narrowed from a bare `except:` so that SystemExit and
                # KeyboardInterrupt are not swallowed
                except Exception:
                    msg = "ERROR: unable to prepare '%s' tablespace (destination '%s')" % (
                        name, location)
                    _logger.critical(msg)
                    raise SystemExit(msg)
                yield "\t%s, %s, %s" % (oid, name, location)
    target_epoch = None
    if target_time:
        try:
            target_datetime = dateutil.parser.parse(target_time)
        except Exception:
            msg = "ERROR: unable to parse the target time parameter %r" % target_time
            _logger.critical(msg)
            raise SystemExit(msg)
        # Convert to a Unix epoch, keeping sub-second precision
        target_epoch = time.mktime(target_datetime.timetuple()) + (
            target_datetime.microsecond / 1000000.)
    if target_time or target_xid or (target_tli and target_tli != backup.timeline):
        # Summarise the requested Point-In-Time-Recovery targets
        targets = {}
        if target_time:
            targets['time'] = str(target_datetime)
        if target_xid:
            targets['xid'] = str(target_xid)
        if target_tli and target_tli != backup.timeline:
            targets['timeline'] = str(target_tli)
        yield "Doing PITR. Recovery target %s" % \
            (", ".join(["%s: %r" % (k, v) for k, v in targets.items()]))
    # Copy the base backup
    msg = "Copying the base backup."
    yield msg
    _logger.info(msg)
    self.recover_basebackup_copy(backup, dest, remote_command)
    _logger.info("Base backup copied.")
    # Prepare WAL segments local directory
    msg = "Copying required wal segments."
    _logger.info(msg)
    yield msg
    # PITR keeps WALs apart in barman_xlog (removed at end of recovery
    # via recovery_end_command); otherwise they go straight into pg_xlog
    if target_time or target_xid or (target_tli and target_tli != backup.timeline):
        wal_dest = os.path.join(dest, 'barman_xlog')
    else:
        wal_dest = os.path.join(dest, 'pg_xlog')
    # Retrieve the list of required WAL segments according to recovery options
    xlogs = {}
    required_xlog_files = tuple(
        self.server.get_required_xlog_files(backup, target_tli,
                                            target_epoch, target_xid))
    # Group segment names by their containing hash directory
    for filename in required_xlog_files:
        hashdir = xlog.hash_dir(filename)
        if hashdir not in xlogs:
            xlogs[hashdir] = []
        xlogs[hashdir].append(filename)
    # Check decompression options
    decompressor = self.compression_manager.get_decompressor()
    # Restore WAL segments
    self.recover_xlog_copy(decompressor, xlogs, wal_dest, remote_command)
    _logger.info("Wal segmets copied.")
    # Generate recovery.conf file (only if needed by PITR)
    if target_time or target_xid or (target_tli and target_tli != backup.timeline):
        msg = "Generating recovery.conf"
        yield msg
        _logger.info(msg)
        # For remote recovery write locally to a temp dir, then rsync it
        if remote_command:
            tempdir = tempfile.mkdtemp(prefix='barman_recovery-')
            recovery = open(os.path.join(tempdir, 'recovery.conf'), 'w')
        else:
            recovery = open(os.path.join(dest, 'recovery.conf'), 'w')
        print >> recovery, "restore_command = 'cp barman_xlog/%f %p'"
        print >> recovery, "recovery_end_command = 'rm -fr barman_xlog'"
        if target_time:
            print >> recovery, "recovery_target_time = '%s'" % target_time
        if target_tli:
            print >> recovery, "recovery_target_timeline = %s" % target_tli
        if target_xid:
            print >> recovery, "recovery_target_xid = '%s'" % target_xid
        if exclusive:
            print >> recovery, "recovery_target_inclusive = '%s'" % (
                not exclusive)
        recovery.close()
        if remote_command:
            recovery = rsync.from_file_list(['recovery.conf'], tempdir,
                                            ':%s' % dest)
            shutil.rmtree(tempdir)
        _logger.info('recovery.conf generated')
    else:
        # avoid shipping of just recovered pg_xlog files
        if remote_command:
            status_dir = tempfile.mkdtemp(prefix='barman_xlog_status-')
        else:
            status_dir = os.path.join(wal_dest, 'archive_status')
            os.makedirs(status_dir)  # no need to check, it must not exist
        # Mark every restored segment as already archived (.done file)
        for filename in required_xlog_files:
            with open(os.path.join(status_dir,
                                   "%s.done" % filename), 'a') as f:
                f.write('')
        if remote_command:
            retval = rsync(
                '%s/' % status_dir,
                ':%s' % os.path.join(wal_dest, 'archive_status'))
            if retval != 0:
                msg = "WARNING: unable to populate pg_xlog/archive_status dorectory"
                yield msg
                _logger.warning(msg)
            shutil.rmtree(status_dir)
    # Disable dangerous setting in the target data dir
    if remote_command:
        tempdir = tempfile.mkdtemp(prefix='barman_recovery-')
        pg_config = os.path.join(tempdir, 'postgresql.conf')
        shutil.copy2(
            os.path.join(backup.get_basebackup_directory(),
                         'pgdata', 'postgresql.conf'),
            pg_config)
    else:
        pg_config = os.path.join(dest, 'postgresql.conf')
    # Force archive_command to 'false', keeping the original as *.origin
    if self.pg_config_mangle(pg_config,
                             {'archive_command': 'false'},
                             "%s.origin" % pg_config):
        msg = "The archive_command was set to 'false' to prevent data losses."
        yield msg
        _logger.info(msg)
    # Find dangerous options in the configuration file (locations).
    # BUGFIX: this must run BEFORE the remote rsync/rmtree below — the
    # original inspected pg_config after removing the tempdir that
    # contained it during a remote recovery
    clashes = self.pg_config_detect_possible_issues(pg_config)
    if remote_command:
        recovery = rsync.from_file_list(
            ['postgresql.conf', 'postgresql.conf.origin'], tempdir,
            ':%s' % dest)
        shutil.rmtree(tempdir)
    yield ""
    yield "Your PostgreSQL server has been successfully prepared for recovery!"
    yield ""
    yield "Please review network and archive related settings in the PostgreSQL"
    yield "configuration file before starting the just recovered instance."
    yield ""
    if clashes:
        yield "WARNING: Before starting up the recovered PostgreSQL server,"
        # BUGFIX: fixed word-order typo ("review the also settings")
        yield "please review also the settings of the following configuration"
        yield "options as they might interfere with your current recovery attempt:"
        yield ""
        for name, value in sorted(clashes.items()):
            yield " %s = %s" % (name, value)
        yield ""
    _logger.info("Recovery completed successful.")
def xlog_copy(self, required_xlog_files, wal_dest, remote_command):
    """
    Restore WAL segments

    :param required_xlog_files: list of all required WAL files
    :param wal_dest: the destination directory for xlog recover
    :param remote_command: default None. The remote command to recover
        the xlog, in case of remote backup.
    """
    # Retrieve the list of required WAL segments
    # according to recovery options
    xlogs = {}
    for wal_info in required_xlog_files:
        hashdir = xlog.hash_dir(wal_info.name)
        if hashdir not in xlogs:
            xlogs[hashdir] = []
        xlogs[hashdir].append(wal_info.name)
    # Check decompression options
    compressor = self.backup_manager.compression_manager.get_compressor()
    rsync = RsyncPgData(
        ssh=remote_command,
        bwlimit=self.config.bandwidth_limit,
        network_compression=self.config.network_compression)
    if remote_command:
        # If remote recovery tell rsync to copy them remotely
        # add ':' prefix to mark it as remote
        # add '/' suffix to ensure it is a directory
        wal_dest = ':%s/' % wal_dest
    else:
        # we will not use rsync: destination directory must exist
        mkpath(wal_dest)
    # A remote recovery of compressed WALs needs a local spool directory
    # where segments are decompressed before being shipped with rsync
    xlog_spool = None
    if compressor and remote_command:
        xlog_spool = tempfile.mkdtemp(prefix='barman_xlog-')
    total_wals = sum(map(len, xlogs.values()))
    partial_count = 0
    try:
        for prefix in sorted(xlogs):
            batch_len = len(xlogs[prefix])
            partial_count += batch_len
            source_dir = os.path.join(self.config.wals_directory, prefix)
            _logger.info(
                "Starting copy of %s WAL files %s/%s from %s to %s",
                batch_len, partial_count, total_wals,
                xlogs[prefix][0], xlogs[prefix][-1])
            if compressor:
                if remote_command:
                    # Decompress to the spool, ship the batch, then
                    # remove the spooled copies to bound disk usage
                    for segment in xlogs[prefix]:
                        compressor.decompress(
                            os.path.join(source_dir, segment),
                            os.path.join(xlog_spool, segment))
                    try:
                        rsync.from_file_list(xlogs[prefix],
                                             xlog_spool, wal_dest)
                    # Normalised from legacy `except X, e` syntax, as
                    # already used for OSError below
                    except CommandFailedException as e:
                        msg = "data transfer failure while copying WAL files " \
                              "to directory '%s'" % (wal_dest[1:],)
                        raise DataTransferFailure.from_rsync_error(e, msg)
                    # Cleanup files after the transfer
                    for segment in xlogs[prefix]:
                        file_name = os.path.join(xlog_spool, segment)
                        try:
                            os.unlink(file_name)
                        except OSError as e:
                            output.warning(
                                "Error removing temporary file '%s': %s",
                                file_name, e)
                else:
                    # decompress directly to the right place
                    for segment in xlogs[prefix]:
                        compressor.decompress(
                            os.path.join(source_dir, segment),
                            os.path.join(wal_dest, segment))
            else:
                try:
                    rsync.from_file_list(
                        xlogs[prefix],
                        "%s/" % os.path.join(self.config.wals_directory,
                                             prefix),
                        wal_dest)
                except CommandFailedException as e:
                    msg = "data transfer failure while copying WAL files " \
                          "to directory '%s'" % (wal_dest[1:],)
                    raise DataTransferFailure.from_rsync_error(e, msg)
    finally:
        # BUGFIX: the original never removed the spool directory, leaking
        # one temporary directory per remote compressed recovery
        if xlog_spool is not None:
            shutil.rmtree(xlog_spool)
def _generate_recovery_conf(self, recovery_info, backup_info, dest,
                            exclusive, remote_command, target_name,
                            target_time, target_tli, target_xid):
    """
    Generate a recovery.conf file for PITR containing
    all the required configurations

    :param dict recovery_info: Dictionary containing all the recovery
        parameters
    :param barman.infofile.BackupInfo backup_info: representation of a
        backup
    :param str dest: destination directory of the recovery
    :param boolean exclusive: exclusive backup or concurrent
    :param str remote_command: ssh command for remote connection
    :param str target_name: recovery target name for PITR
    :param str target_time: recovery target time for PITR
    :param str target_tli: recovery target timeline for PITR
    :param str target_xid: recovery target transaction id for PITR
    """
    # For a remote recovery the file is generated inside the local
    # temporary directory and shipped to `dest` with rsync afterwards
    if remote_command:
        recovery_conf_path = os.path.join(recovery_info['tempdir'],
                                          'recovery.conf')
    else:
        recovery_conf_path = os.path.join(dest, 'recovery.conf')
    # Use a context manager so the file is closed even if an error
    # occurs while writing (the original leaked the handle on failure)
    with open(recovery_conf_path, 'w') as recovery:
        # If GET_WAL has been set, use the get-wal command to retrieve
        # the required wal files. Otherwise use the unix command "cp" to
        # copy them from the barman_xlog directory
        if recovery_info['get_wal']:
            # We need to create the right restore command.
            # If we are doing a remote recovery,
            # the barman-cli package is REQUIRED on the server that is
            # hosting the PostgreSQL server.
            # We use the machine FQDN and the barman_user
            # setting to call the barman-wal-restore correctly.
            # If local recovery, we use barman directly, assuming
            # the postgres process will be executed with the barman user.
            # It MUST be reviewed by the user in any case.
            if remote_command:
                fqdn = socket.getfqdn()
                print("# The 'barman-wal-restore' command "
                      "is provided in the 'barman-cli' package",
                      file=recovery)
                print("restore_command = 'barman-wal-restore -U %s "
                      "%s %s %%f %%p'" % (self.config.config.user,
                                          fqdn, self.config.name),
                      file=recovery)
            else:
                print("# The 'barman get-wal' command "
                      "must run as '%s' user" % self.config.config.user,
                      file=recovery)
                print("restore_command = 'sudo -u %s "
                      "barman get-wal %s %%f > %%p'" % (
                          self.config.config.user, self.config.name),
                      file=recovery)
            recovery_info['results']['get_wal'] = True
        else:
            print("restore_command = 'cp barman_xlog/%f %p'",
                  file=recovery)
        # recovery_end_command is only available from PostgreSQL 8.4
        if backup_info.version >= 80400 and \
                not recovery_info['get_wal']:
            print("recovery_end_command = 'rm -fr barman_xlog'",
                  file=recovery)
        if target_time:
            print("recovery_target_time = '%s'" % target_time,
                  file=recovery)
        if target_tli:
            print("recovery_target_timeline = %s" % target_tli,
                  file=recovery)
        if target_xid:
            print("recovery_target_xid = '%s'" % target_xid,
                  file=recovery)
        if target_name:
            print("recovery_target_name = '%s'" % target_name,
                  file=recovery)
        if (target_xid or target_time) and exclusive:
            print("recovery_target_inclusive = '%s'" % (not exclusive),
                  file=recovery)
    if remote_command:
        plain_rsync = RsyncPgData(
            path=self.server.path,
            ssh=remote_command,
            bwlimit=self.config.bandwidth_limit,
            network_compression=self.config.network_compression)
        try:
            plain_rsync.from_file_list(['recovery.conf'],
                                       recovery_info['tempdir'],
                                       ':%s' % dest)
        except CommandFailedException as e:
            output.error('remote copy of recovery.conf failed: %s', e)
            output.close_and_exit()