def get_required_xlog_files(self, backup, target_tli=None, target_time=None, target_xid=None):
    """
    Get the xlog files required for a recovery

    :param backup: the backup whose WAL range is requested (provides
        begin_wal and end_wal)
    :param target_tli: target timeline; when not provided it defaults to
        the timeline decoded from the backup's end WAL
    :param target_time: optional recovery target time; iteration stops
        once a WAL later than the backup end and newer than this time
        has been yielded
    :param target_xid: optional recovery target xid
        (NOTE(review): not referenced in this body — confirm intentional)
    """
    begin = backup.begin_wal
    end = backup.end_wal
    # If timeline isn't specified, assume it is the same timeline
    # of the backup
    if not target_tli:
        target_tli, _, _ = xlog.decode_segment_name(end)
    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            # Handle .history files: add all of them to the output,
            # regardless of their age
            if xlog.is_history_file(wal_info.name):
                yield wal_info
                continue
            # Skip WALs archived before the backup started
            if wal_info.name < begin:
                continue
            # Skip WALs belonging to a timeline later than the target one
            tli, _, _ = xlog.decode_segment_name(wal_info.name)
            if tli > target_tli:
                continue
            yield wal_info
            # Track the highest WAL yielded so far; once past the backup
            # end AND past the target time, only history files can still
            # be of interest, so leave this loop
            if wal_info.name > end:
                end = wal_info.name
                if target_time and target_time < wal_info.time:
                    break
        # return all the remaining history files
        # (the file handle resumes where the first loop stopped)
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            if xlog.is_history_file(wal_info.name):
                yield wal_info
def test_recover_xlog(self, rsync_pg_mock, tmpdir):
    """
    Test the recovery of the xlogs of a backup

    :param rsync_pg_mock: Mock rsync object for the purpose of this test
    """
    # Build basic folders/files structure
    dest = tmpdir.mkdir('destination')
    wals = tmpdir.mkdir('wals')
    xlog_dir = wals.mkdir(xlog.hash_dir('000000000000000000000002'))
    xlog_file = xlog_dir.join('000000000000000000000002')
    xlog_file.write('dummy content')
    server = testing_helpers.build_real_server(
        main_conf={'wals_directory': wals.strpath})
    # build executor
    executor = RecoveryExecutor(server.backup_manager)
    required_wals = (WalFileInfo.from_xlogdb_line(
        '000000000000000000000002\t42\t43\tNone\n'),)
    executor.xlog_copy(required_wals, dest.strpath, None)
    # check for a correct invocation of rsync using local paths.
    # BUG FIX: the original called `assert_called_once(...)` WITH
    # arguments — on mock < 3.5 that is a plain attribute access that
    # asserts nothing (silent no-op test), on newer mock it raises
    # TypeError. The real assertion is `assert_called_once_with`, and it
    # must target the rsync *instance* (return_value), which is what
    # xlog_copy actually invokes from_file_list on.
    rsync_pg_mock.return_value.from_file_list.assert_called_once_with(
        ['000000000000000000000002'], xlog_dir.strpath, dest.strpath)
    # reset mock calls
    rsync_pg_mock.reset_mock()
    required_wals = (WalFileInfo.from_xlogdb_line(
        '000000000000000000000002\t42\t43\tNone\n'),)
    executor.backup_manager.compression_manager = Mock()
    executor.xlog_copy(required_wals, dest.strpath, 'remote_command')
    # check for the invocation of rsync on a remote call
    # (same assert_called_once -> assert_called_once_with fix as above)
    rsync_pg_mock.assert_called_once_with(
        network_compression=False, bwlimit=None, ssh='remote_command')
def test_get_wal_info(self, get_wal_mock, tmpdir):
    """
    Basic test for the get_wal_info method.

    Checks the wal_total_seconds and wals_per_second values.
    """
    # Test server rooted in a temporary barman home
    server = build_real_server(global_conf={
        'barman_home': tmpdir.strpath
    })
    # get_wal_until_next_backup is mocked to return three fake WALs;
    # the first one doubles as begin and end WAL of the backup
    xlogdb_lines = [
        "000000010000000000000002\t16777216\t1434450086.53\tNone\n",
        "000000010000000000000003\t16777216\t1434450087.54\tNone\n",
        "000000010000000000000004\t16777216\t1434450088.55\tNone\n",
    ]
    fake_wals = [WalFileInfo.from_xlogdb_line(raw) for raw in xlogdb_lines]
    get_wal_mock.return_value = fake_wals
    backup_info = build_test_backup_info(
        server=server,
        begin_wal=fake_wals[0].name,
        end_wal=fake_wals[0].name)
    backup_info.save()
    # Expected total time: last WAL timestamp minus first WAL timestamp
    expected_seconds = fake_wals[-1].time - fake_wals[0].time
    # Expected rate: number of WALs over the total time in seconds
    expected_rate = len(fake_wals) / expected_seconds
    wal_info = server.get_wal_info(backup_info)
    assert wal_info
    assert wal_info['wal_total_seconds'] == expected_seconds
    assert wal_info['wals_per_second'] == expected_rate
def test_recover_xlog(self, rsync_pg_mock, cm_mock, tmpdir): """ Test the recovery of the xlogs of a backup :param rsync_pg_mock: Mock rsync object for the purpose if this test """ # Build basic folders/files structure dest = tmpdir.mkdir("destination") wals = tmpdir.mkdir("wals") # Create 3 WAL files with different compressions xlog_dir = wals.mkdir(xlog.hash_dir("000000000000000000000002")) xlog_plain = xlog_dir.join("000000000000000000000001") xlog_gz = xlog_dir.join("000000000000000000000002") xlog_bz2 = xlog_dir.join("000000000000000000000003") xlog_plain.write("dummy content") xlog_gz.write("dummy content gz") xlog_bz2.write("dummy content bz2") server = testing_helpers.build_real_server(main_conf={"wals_directory": wals.strpath}) # Prepare compressors mock c = {"gzip": mock.Mock(name="gzip"), "bzip2": mock.Mock(name="bzip2")} cm_mock.return_value.get_compressor = lambda compression=None, path=None: c[compression] # touch destination files to avoid errors on cleanup c["gzip"].decompress.side_effect = lambda src, dst: open(dst, "w") c["bzip2"].decompress.side_effect = lambda src, dst: open(dst, "w") # Build executor executor = RecoveryExecutor(server.backup_manager) # Test: local copy required_wals = ( WalFileInfo.from_xlogdb_line("000000000000000000000001\t42\t43\tNone\n"), WalFileInfo.from_xlogdb_line("000000000000000000000002\t42\t43\tgzip\n"), WalFileInfo.from_xlogdb_line("000000000000000000000003\t42\t43\tbzip2\n"), ) executor._xlog_copy(required_wals, dest.strpath, None) # Check for a correct invocation of rsync using local paths rsync_pg_mock.assert_called_once_with(network_compression=False, bwlimit=None, path=None, ssh=None) assert not rsync_pg_mock.return_value.from_file_list.called c["gzip"].decompress.assert_called_once_with(xlog_gz.strpath, mock.ANY) c["bzip2"].decompress.assert_called_once_with(xlog_bz2.strpath, mock.ANY) # Reset mock calls rsync_pg_mock.reset_mock() c["gzip"].reset_mock() c["bzip2"].reset_mock() # Test: remote copy 
executor._xlog_copy(required_wals, dest.strpath, "remote_command") # Check for the invocation of rsync on a remote call rsync_pg_mock.assert_called_once_with( network_compression=False, bwlimit=None, path=mock.ANY, ssh="remote_command" ) rsync_pg_mock.return_value.from_file_list.assert_called_once_with( ["000000000000000000000001", "000000000000000000000002", "000000000000000000000003"], mock.ANY, mock.ANY ) c["gzip"].decompress.assert_called_once_with(xlog_gz.strpath, mock.ANY) c["bzip2"].decompress.assert_called_once_with(xlog_bz2.strpath, mock.ANY)
def last_wal_age(server, args):
    """
    Nagios-style check: report how many minutes old the last WAL is.

    Reads the final line of the server's xlogdb, converts its timestamp
    into an age in minutes and hands the result to exit_check().
    """
    warn = args.warning
    crit = args.critical
    from barman.infofile import WalFileInfo
    with server.xlogdb() as fxlogdb:
        # Scan the whole file: only the last line is relevant
        last_line = None
        for last_line in fxlogdb:
            pass
        if last_line is None:
            critical("No WAL received yet.")
        last_wal = WalFileInfo.from_xlogdb_line(last_line)
        received_at = datetime.fromtimestamp(last_wal.time)
    # Age in minutes: whole days plus the sub-day seconds component
    age = datetime.now() - received_at
    minutes = age.days * 60 * 24 + age.seconds / 60
    exit_check(minutes, warn, crit,
               "Last WAL is %s minutes old." % minutes,
               perfdata_key="minutes", perfdata_min=0)
def missing_wals(server, args):
    """
    Nagios-style check: count WAL segments listed in the xlogdb that are
    missing from the wals directory on disk, then report via exit_check().
    """
    warn = args.warning
    crit = args.critical
    from barman.xlog import is_wal_file
    from barman.infofile import WalFileInfo
    wals_directory = server.config.wals_directory
    missing = 0
    with server.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_name = WalFileInfo.from_xlogdb_line(line).name
            # Only plain WAL segment files are checked
            if not is_wal_file(wal_name):
                continue
            # Segments live in a hash directory named after their
            # first 16 characters
            wal_path = os.path.join(wals_directory, wal_name[:16], wal_name)
            if not os.path.exists(wal_path):
                missing += 1
    exit_check(missing, warn, crit,
               "There are %d missing wals for the last backup." % missing,
               perfdata_key="missing", perfdata_min=0)
def remove_wal_before_backup(self, backup_info):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files

    :param BackupInfo|None backup_info: the backup information structure
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite the xlogdb into a sibling ".new" file, then atomically
        # swap it in place of the old one
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # Keeps the WAL segment if it is a history file or later
                # than the given backup (the first available)
                # NOTE(review): if backup_info.begin_wal can be None this
                # comparison misbehaves (TypeError on Python 3) — confirm
                # callers always pass a backup with begin_wal set
                if (xlog.is_history_file(wal_info.name) or
                        (backup_info and
                         wal_info.name >= backup_info.begin_wal)):
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                    continue
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Ensure the new file content reaches the disk before the swap
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        # Replace the old xlogdb and persist the directory entry
        shutil.move(xlogdb_new, fxlogdb.name)
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
def get_wal_until_next_backup(self, backup, include_history=False):
    """
    Get the xlog files between backup and the next

    :param BackupInfo backup: a backup object, the starting point
        to retrieve WALs
    :param bool include_history: option for the inclusion of
        history files into the output
    """
    begin = backup.begin_wal
    next_end = None
    # If a later backup exists, its end WAL bounds the iteration
    if self.get_next_backup(backup.backup_id):
        next_end = self.get_next_backup(backup.backup_id).end_wal
    backup_tli, _, _ = xlog.decode_segment_name(begin)

    with self.xlogdb() as fxlogdb:
        for line in fxlogdb:
            wal_info = WalFileInfo.from_xlogdb_line(line)
            # Handle .history files: add all of them to the output,
            # regardless of their age, if requested (the 'include_history'
            # parameter is True)
            if xlog.is_history_file(wal_info.name):
                if include_history:
                    yield wal_info
                continue
            # Skip WALs archived before the backup started
            if wal_info.name < begin:
                continue
            # Skip WALs from a timeline later than the backup's own
            tli, _, _ = xlog.decode_segment_name(wal_info.name)
            if tli > backup_tli:
                continue
            # Skip anything that is not a plain WAL segment
            if not xlog.is_wal_file(wal_info.name):
                continue
            # Stop once past the end of the next backup (if any)
            if next_end and wal_info.name > next_end:
                break
            yield wal_info
def remove_wal_before_backup(self, backup_info, timelines_to_protect=None):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files

    If timelines_to_protect list is passed, never remove a wal in one of
    these timelines.

    :param BackupInfo|None backup_info: the backup information structure
    :param set timelines_to_protect: optional list of timelines
        to protect
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb("r+") as fxlogdb:
        xlogdb_dir = os.path.dirname(fxlogdb.name)
        # Stage the surviving entries in an anonymous temporary file in
        # the same directory, then copy them back over the open xlogdb
        with tempfile.TemporaryFile(mode="w+", dir=xlogdb_dir) as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # A malformed entry is reported and skipped (dropped from
                # the rewritten xlogdb), not treated as fatal
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid WAL segment name %r\n"
                        'HINT: Please run "barman rebuild-xlogdb %s" '
                        "to solve this issue",
                        wal_info.name,
                        self.config.name,
                    )
                    continue
                # Keeps the WAL segment if it is a history file
                keep = xlog.is_history_file(wal_info.name)
                # Keeps the WAL segment if its timeline is in
                # `timelines_to_protect`
                if timelines_to_protect:
                    tli, _, _ = xlog.decode_segment_name(wal_info.name)
                    keep |= tli in timelines_to_protect
                # Keeps the WAL segment if it is a newer
                # than the given backup (the first available)
                if backup_info and backup_info.begin_wal is not None:
                    keep |= wal_info.name >= backup_info.begin_wal
                # If the file has to be kept write it in the new xlogdb
                # otherwise delete it and record it in the removed list
                if keep:
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Copy the staged content back into the original xlogdb and
            # truncate any leftover tail from the old content
            fxlogdb_new.flush()
            fxlogdb_new.seek(0)
            fxlogdb.seek(0)
            shutil.copyfileobj(fxlogdb_new, fxlogdb)
            fxlogdb.truncate()
    return removed
def remove_wal_before_backup(self, backup_info, timelines_to_protect=None):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files

    If timelines_to_protect list is passed, never remove a wal in one of
    these timelines.

    :param BackupInfo|None backup_info: the backup information structure
    :param set timelines_to_protect: optional list of timelines
        to protect
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite the xlogdb into a sibling ".new" file, then atomically
        # swap it in place of the old one
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # A malformed entry is reported and skipped, not fatal
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid xlog segment name %r\n"
                        "HINT: Please run \"barman rebuild-xlogdb %s\" "
                        "to solve this issue",
                        wal_info.name, self.config.name)
                    continue
                # Keeps the WAL segment if it is a history file
                keep = xlog.is_history_file(wal_info.name)
                # Keeps the WAL segment if its timeline is in
                # `timelines_to_protect`
                if timelines_to_protect:
                    tli, _, _ = xlog.decode_segment_name(wal_info.name)
                    keep |= tli in timelines_to_protect
                # Keeps the WAL segment if it is newer than the given
                # backup (the first available).
                # BUG FIX: guard against backup_info.begin_wal being None,
                # which made the `>=` comparison raise TypeError on
                # Python 3 (and silently keep every WAL on Python 2)
                if backup_info and backup_info.begin_wal is not None:
                    keep |= wal_info.name >= backup_info.begin_wal
                # If the file has to be kept write it in the new xlogdb
                # otherwise delete it and record it in the removed list
                if keep:
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Ensure the new file content reaches the disk before the swap
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        # Replace the old xlogdb and persist the directory entry
        shutil.move(xlogdb_new, fxlogdb.name)
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
def test_from_xlogdb_line(self):
    """
    Verify that parsing an xlogdb line produces a WalFileInfo equal to
    one built attribute by attribute.
    """
    # Reference WalFileInfo constructed by hand
    expected = WalFileInfo()
    expected.name = "000000000000000000000001"
    expected.size = 42
    expected.time = 43
    expected.compression = None
    # Sanity check on the reference object's relative path
    assert expected.relpath() == ("0000000000000000/000000000000000000000001")
    # Mocked server object exposing only the wals_directory setting
    server = mock.Mock(name="server")
    server.config.wals_directory = "/tmp/wals"
    # Parse the serialized line and compare every item with the reference
    parsed = expected.from_xlogdb_line("000000000000000000000001\t42\t43\tNone\n")
    assert list(expected.items()) == list(parsed.items())
def test_from_xlogdb_line(self):
    """
    Check that from_xlogdb_line rebuilds the same WalFileInfo that was
    created manually.
    """
    # Manually constructed reference object
    reference = WalFileInfo()
    reference.name = '000000000000000000000001'
    reference.size = 42
    reference.time = 43
    reference.compression = None
    # The relative path must follow the <hash_dir>/<name> layout
    assert reference.relpath() == (
        '0000000000000000/000000000000000000000001')
    # Server mock providing only the wals_directory setting
    server = mock.Mock(name='server')
    server.config.wals_directory = '/tmp/wals'
    # Parse the serialized line and compare all items with the reference
    result = reference.from_xlogdb_line(
        '000000000000000000000001\t42\t43\tNone\n')
    assert list(reference.items()) == list(result.items())
def remove_wal_before_backup(self, backup_info):
    """
    Remove WAL files which have been archived before the start of
    the provided backup.

    If no backup_info is provided delete all available WAL files

    :param BackupInfo|None backup_info: the backup information structure
    :return list: a list of removed WAL files
    """
    removed = []
    with self.server.xlogdb() as fxlogdb:
        # Rewrite the xlogdb into a sibling ".new" file, then atomically
        # swap it in place of the old one
        xlogdb_new = fxlogdb.name + ".new"
        with open(xlogdb_new, 'w') as fxlogdb_new:
            for line in fxlogdb:
                wal_info = WalFileInfo.from_xlogdb_line(line)
                # A malformed entry is reported and skipped (dropped from
                # the rewritten xlogdb), not treated as fatal
                if not xlog.is_any_xlog_file(wal_info.name):
                    output.error(
                        "invalid xlog segment name %r\n"
                        "HINT: Please run \"barman rebuild-xlogdb %s\" "
                        "to solve this issue",
                        wal_info.name, self.config.name)
                    continue
                # Keeps the WAL segment if it is a history file or later
                # than the given backup (the first available)
                # NOTE(review): if backup_info.begin_wal can be None this
                # comparison misbehaves (TypeError on Python 3) — confirm
                # callers always pass a backup with begin_wal set
                if (xlog.is_history_file(wal_info.name) or
                        (backup_info and
                         wal_info.name >= backup_info.begin_wal)):
                    fxlogdb_new.write(wal_info.to_xlogdb_line())
                    continue
                else:
                    self.delete_wal(wal_info)
                    removed.append(wal_info.name)
            # Ensure the new file content reaches the disk before the swap
            fxlogdb_new.flush()
            os.fsync(fxlogdb_new.fileno())
        # Replace the old xlogdb and persist the directory entry
        shutil.move(xlogdb_new, fxlogdb.name)
        fsync_dir(os.path.dirname(fxlogdb.name))
    return removed
def test_recover_xlog(self, rsync_pg_mock, cm_mock, tmpdir): """ Test the recovery of the xlogs of a backup :param rsync_pg_mock: Mock rsync object for the purpose if this test """ # Build basic folders/files structure dest = tmpdir.mkdir('destination') wals = tmpdir.mkdir('wals') # Create 3 WAL files with different compressions xlog_dir = wals.mkdir(xlog.hash_dir('000000000000000000000002')) xlog_plain = xlog_dir.join('000000000000000000000001') xlog_gz = xlog_dir.join('000000000000000000000002') xlog_bz2 = xlog_dir.join('000000000000000000000003') xlog_plain.write('dummy content') xlog_gz.write('dummy content gz') xlog_bz2.write('dummy content bz2') server = testing_helpers.build_real_server( main_conf={'wals_directory': wals.strpath}) # Prepare compressors mock c = { 'gzip': Mock(name='gzip'), 'bzip2': Mock(name='bzip2'), } cm_mock.return_value.get_compressor = \ lambda compression=None, path=None: c[compression] # touch destination files to avoid errors on cleanup c['gzip'].decompress.side_effect = lambda src, dst: open(dst, 'w') c['bzip2'].decompress.side_effect = lambda src, dst: open(dst, 'w') # Build executor executor = RecoveryExecutor(server.backup_manager) # Test: local copy required_wals = ( WalFileInfo.from_xlogdb_line( '000000000000000000000001\t42\t43\tNone\n'), WalFileInfo.from_xlogdb_line( '000000000000000000000002\t42\t43\tgzip\n'), WalFileInfo.from_xlogdb_line( '000000000000000000000003\t42\t43\tbzip2\n'), ) executor.xlog_copy(required_wals, dest.strpath, None) # Check for a correct invocation of rsync using local paths rsync_pg_mock.assert_called_once_with( network_compression=False, bwlimit=None, path=None, ssh=None) assert not rsync_pg_mock.return_value.from_file_list.called c['gzip'].decompress.assert_called_once_with(xlog_gz.strpath, ANY) c['bzip2'].decompress.assert_called_once_with(xlog_bz2.strpath, ANY) # Reset mock calls rsync_pg_mock.reset_mock() c['gzip'].reset_mock() c['bzip2'].reset_mock() # Test: remote copy 
executor.xlog_copy(required_wals, dest.strpath, 'remote_command') # Check for the invocation of rsync on a remote call rsync_pg_mock.assert_called_once_with( network_compression=False, bwlimit=None, path=ANY, ssh='remote_command') rsync_pg_mock.return_value.from_file_list.assert_called_once_with( [ '000000000000000000000001', '000000000000000000000002', '000000000000000000000003'], ANY, ANY) c['gzip'].decompress.assert_called_once_with(xlog_gz.strpath, ANY) c['bzip2'].decompress.assert_called_once_with(xlog_bz2.strpath, ANY)
def test_recover_xlog(self, rsync_pg_mock, cm_mock, tmpdir): """ Test the recovery of the xlogs of a backup :param rsync_pg_mock: Mock rsync object for the purpose if this test """ # Build basic folders/files structure dest = tmpdir.mkdir('destination') wals = tmpdir.mkdir('wals') # Create 3 WAL files with different compressions xlog_dir = wals.mkdir(xlog.hash_dir('000000000000000000000002')) xlog_plain = xlog_dir.join('000000000000000000000001') xlog_gz = xlog_dir.join('000000000000000000000002') xlog_bz2 = xlog_dir.join('000000000000000000000003') xlog_plain.write('dummy content') xlog_gz.write('dummy content gz') xlog_bz2.write('dummy content bz2') server = testing_helpers.build_real_server( main_conf={'wals_directory': wals.strpath}) # Prepare compressors mock c = { 'gzip': mock.Mock(name='gzip'), 'bzip2': mock.Mock(name='bzip2'), } cm_mock.return_value.get_compressor = \ lambda compression=None, path=None: c[compression] # touch destination files to avoid errors on cleanup c['gzip'].decompress.side_effect = lambda src, dst: open(dst, 'w') c['bzip2'].decompress.side_effect = lambda src, dst: open(dst, 'w') # Build executor executor = RecoveryExecutor(server.backup_manager) # Test: local copy required_wals = ( WalFileInfo.from_xlogdb_line( '000000000000000000000001\t42\t43\tNone\n'), WalFileInfo.from_xlogdb_line( '000000000000000000000002\t42\t43\tgzip\n'), WalFileInfo.from_xlogdb_line( '000000000000000000000003\t42\t43\tbzip2\n'), ) executor._xlog_copy(required_wals, dest.strpath, None) # Check for a correct invocation of rsync using local paths rsync_pg_mock.assert_called_once_with(network_compression=False, bwlimit=None, path=None, ssh=None) assert not rsync_pg_mock.return_value.from_file_list.called c['gzip'].decompress.assert_called_once_with(xlog_gz.strpath, mock.ANY) c['bzip2'].decompress.assert_called_once_with(xlog_bz2.strpath, mock.ANY) # Reset mock calls rsync_pg_mock.reset_mock() c['gzip'].reset_mock() c['bzip2'].reset_mock() # Test: remote copy 
executor._xlog_copy(required_wals, dest.strpath, 'remote_command') # Check for the invocation of rsync on a remote call rsync_pg_mock.assert_called_once_with(network_compression=False, bwlimit=None, path=mock.ANY, ssh='remote_command') rsync_pg_mock.return_value.from_file_list.assert_called_once_with([ '000000000000000000000001', '000000000000000000000002', '000000000000000000000003' ], mock.ANY, mock.ANY) c['gzip'].decompress.assert_called_once_with(xlog_gz.strpath, mock.ANY) c['bzip2'].decompress.assert_called_once_with(xlog_bz2.strpath, mock.ANY)