def test_mode(self):
    """Verify the executor ``mode`` property for both backup methods."""
    # An rsync executor defaults to the exclusive strategy
    manager = build_backup_manager()
    assert manager.executor.mode == "rsync-exclusive"
    # A postgres executor has no strategy, so mode is the plain method name
    manager = build_backup_manager(
        global_conf={"backup_method": "postgres"})
    assert manager.executor.mode == "postgres"
def test_streamingwalarchiver_check_receivexlog_installed(
        self, command_mock, which_mock):
    """
    Test the remote status detection of the StreamingWalArchiver class.
    """
    backup_manager = build_backup_manager()
    backup_manager.server.postgres.server_txt_version = "9.2"
    # Case 1: pg_receivexlog cannot be found on the system
    which_mock.return_value = None
    archiver = StreamingWalArchiver(backup_manager)
    remote_status = archiver.get_remote_status()
    which_mock.assert_called_with('pg_receivexlog', ANY)
    assert remote_status == {
        "pg_receivexlog_installed": False,
        "pg_receivexlog_path": None,
        "pg_receivexlog_compatible": None,
        "pg_receivexlog_version": None,
    }
    # Case 2: the binary exists but invoking it fails, so the
    # version/compatibility fields stay undetermined
    backup_manager.server.postgres.server_txt_version = "9.2"
    which_mock.return_value = '/some/path/to/pg_receivexlog'
    command_mock.return_value.side_effect = CommandFailedException
    remote_status = archiver.get_remote_status()
    assert remote_status == {
        "pg_receivexlog_installed": True,
        "pg_receivexlog_path": "/some/path/to/pg_receivexlog",
        "pg_receivexlog_compatible": None,
        "pg_receivexlog_version": None,
    }
def test_wal_info_corner_cases(self, command_mock):
    """Hook script env for a WAL file with no compression and no error."""
    # Mocked BackupManager
    backup_manager = build_backup_manager(name="test_server")
    backup_manager.config.pre_test_hook = "not_existent_script"
    # Mocked WalFileInfo
    now = time.time()
    wal_info = MagicMock(name="wal_info")
    wal_info.name = "XXYYZZAABBCC"
    wal_info.size = 1234567
    wal_info.time = now
    wal_info.compression = None
    wal_info.fullpath.return_value = "/incoming/directory"
    # Run the hook and verify the environment handed to the command
    script = HookScriptRunner(backup_manager, "test_hook", "pre")
    script.env_from_wal_info(wal_info)
    expected_env = {
        "BARMAN_PHASE": "pre",
        "BARMAN_VERSION": version,
        "BARMAN_SERVER": "test_server",
        "BARMAN_CONFIGURATION": "build_config_from_dicts",
        "BARMAN_HOOK": "test_hook",
        "BARMAN_SEGMENT": "XXYYZZAABBCC",
        "BARMAN_FILE": "/incoming/directory",
        "BARMAN_SIZE": "1234567",
        "BARMAN_TIMESTAMP": str(now),
        "BARMAN_COMPRESSION": "",
    }
    script.run()
    assert command_mock.call_count == 1
    assert command_mock.call_args[1]["env_append"] == expected_env
def test_backup_info_exception(self, command_mock):
    """Hook env when the previous backup id cannot be resolved."""
    # Mocked BackupManager whose get_previous_backup always raises
    backup_manager = build_backup_manager(name="test_server")
    backup_manager.config.pre_test_hook = "not_existent_script"
    backup_manager.get_previous_backup = MagicMock()
    backup_manager.get_previous_backup.side_effect = \
        UnknownBackupIdException()
    # Mocked BackupInfo
    backup_info = MagicMock(name="backup_info")
    backup_info.get_basebackup_directory.return_value = "backup_directory"
    backup_info.backup_id = "123456789XYZ"
    backup_info.error = None
    backup_info.status = "OK"
    # Run the hook: BARMAN_PREVIOUS_ID must degrade to an empty string
    script = HookScriptRunner(backup_manager, "test_hook", "pre")
    script.env_from_backup_info(backup_info)
    expected_env = {
        "BARMAN_PHASE": "pre",
        "BARMAN_VERSION": version,
        "BARMAN_SERVER": "test_server",
        "BARMAN_CONFIGURATION": "build_config_from_dicts",
        "BARMAN_HOOK": "test_hook",
        "BARMAN_BACKUP_DIR": "backup_directory",
        "BARMAN_BACKUP_ID": "123456789XYZ",
        "BARMAN_ERROR": "",
        "BARMAN_STATUS": "OK",
        "BARMAN_PREVIOUS_ID": "",
    }
    script.run()
    assert command_mock.call_count == 1
    assert command_mock.call_args[1]["env_append"] == expected_env
def test_check(self, remote_status_mock):
    """
    Basic test for the postgres executor check method.

    :param remote_status_mock: mock for the get_remote_status method
    """
    # Fully working pg_basebackup: the check must not flag errors
    remote_status_mock.return_value = {
        'pg_basebackup_compatible': True,
        'pg_basebackup_installed': True,
        'pg_basebackup_path': '/fake/path',
        'pg_basebackup_bwlimit': True,
        'pg_basebackup_version': '9.5'
    }
    check_strat = CheckStrategy()
    backup_manager = build_backup_manager(
        global_conf={'backup_method': 'postgres'})
    backup_manager.executor.check(check_strategy=check_strat)
    assert check_strat.has_error is not True
    # Incompatible pg_basebackup: the check must report an error
    remote_status_mock.reset_mock()
    remote_status_mock.return_value = {
        'pg_basebackup_compatible': False,
        'pg_basebackup_installed': True,
        'pg_basebackup_path': True,
        'pg_basebackup_bwlimit': True,
        'pg_basebackup_version': '9.5'
    }
    check_strat = CheckStrategy()
    backup_manager.executor.check(check_strategy=check_strat)
    assert check_strat.has_error is True
def test_check(self, command_mock, capsys):
    """
    Check the ssh connection to a remote server.

    :param command_mock: mock for the remote command machinery
    :param capsys: captured stdout/stderr
    """
    backup_manager = build_backup_manager()
    # Test 1: ssh ok
    check_strategy = CheckOutputStrategy()
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: OK' in out
    # Test 2: ssh ok and PostgreSQL is not responding
    command_mock.reset_mock()
    check_strategy = CheckOutputStrategy()
    backup_manager.server.get_remote_status.return_value = {
        'server_txt_version': None
    }
    backup_manager.server.get_backup.return_value.pgdata = 'test/'
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: OK' in out
    # BUG FIX: the original assertion was a bare string literal (always
    # truthy); actually verify the hint is present in the output
    assert ("Check that the PostgreSQL server is up and no "
            "'backup_label' file is in PGDATA.") in out
    # Test 3: ssh failed
    command_mock.reset_mock()
    command_mock.side_effect = FsOperationFailed
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: FAILED' in out
def test_backup_cache_remove(self, tmpdir):
    """
    Removal of a BackupInfo object from the backups cache.
    """
    # backup_manager with a basic configuration
    backup_manager = build_backup_manager(
        name='TestServer',
        global_conf={'barman_home': tmpdir.strpath})
    assert backup_manager._backup_cache is None
    # A DONE backup to play with
    b_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
    )
    # Removing from an uninitialized cache is a no-op and must not
    # trigger cache initialization
    backup_manager.backup_cache_remove(b_info)
    assert backup_manager._backup_cache is None
    # With an initialized cache the entry is actually dropped
    backup_manager._backup_cache = {b_info.backup_id: b_info}
    backup_manager.backup_cache_remove(b_info)
    assert b_info.backup_id not in backup_manager._backup_cache
def test_backup_info_exception(self, command_mock):
    """Hook env (with retry support) when the previous backup id is unknown."""
    # Mocked BackupManager whose get_previous_backup always raises
    backup_manager = build_backup_manager(name='test_server')
    backup_manager.config.pre_test_hook = 'not_existent_script'
    backup_manager.get_previous_backup = MagicMock()
    backup_manager.get_previous_backup.side_effect = \
        UnknownBackupIdException()
    # Mocked BackupInfo
    backup_info = MagicMock(name='backup_info')
    backup_info.get_basebackup_directory.return_value = 'backup_directory'
    backup_info.backup_id = '123456789XYZ'
    backup_info.error = None
    backup_info.status = 'OK'
    # Run the hook: BARMAN_PREVIOUS_ID must degrade to an empty string
    script = HookScriptRunner(backup_manager, 'test_hook', 'pre')
    script.env_from_backup_info(backup_info)
    expected_env = {
        'BARMAN_PHASE': 'pre',
        'BARMAN_VERSION': version,
        'BARMAN_SERVER': 'test_server',
        'BARMAN_CONFIGURATION': 'build_config_from_dicts',
        'BARMAN_HOOK': 'test_hook',
        'BARMAN_BACKUP_DIR': 'backup_directory',
        'BARMAN_BACKUP_ID': '123456789XYZ',
        'BARMAN_ERROR': '',
        'BARMAN_STATUS': 'OK',
        'BARMAN_PREVIOUS_ID': '',
        'BARMAN_RETRY': '0',
    }
    script.run()
    assert command_mock.call_count == 1
    assert command_mock.call_args[1]['env_append'] == expected_env
def test_backup_cache_add(self, tmpdir):
    """
    Registration of a BackupInfo object into the backups cache.
    """
    backup_manager = build_backup_manager(
        name='TestServer',
        global_conf={'barman_home': tmpdir.strpath})
    # A DONE backup, persisted to disk
    b_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
    )
    b_info.save()
    assert backup_manager._backup_cache is None
    # Adding to an uninitialized cache first loads it from disk,
    # then the object must be retrievable
    backup_manager.backup_cache_add(b_info)
    assert backup_manager.get_backup(b_info.backup_id) is b_info
    # Adding to an (empty) initialized cache registers the object
    backup_manager._backup_cache = {}
    backup_manager.backup_cache_add(b_info)
    assert backup_manager.get_backup(b_info.backup_id) is b_info
def test_wal_info_corner_cases(self, command_mock):
    """Hook script env for a WAL file with an explicit path and error."""
    # Mocked BackupManager
    backup_manager = build_backup_manager(name='test_server')
    backup_manager.config.pre_test_hook = 'not_existent_script'
    # Mocked WalFileInfo
    now = time.time()
    wal_info = MagicMock(name='wal_info')
    wal_info.name = 'XXYYZZAABBCC'
    wal_info.size = 1234567
    wal_info.time = now
    wal_info.compression = None
    wal_info.fullpath.return_value = '/incoming/directory'
    # Run the hook passing an override path and an error: both must
    # surface in the environment
    script = HookScriptRunner(backup_manager, 'test_hook', 'pre')
    script.env_from_wal_info(wal_info, '/somewhere', Exception('BOOM!'))
    expected_env = {
        'BARMAN_PHASE': 'pre',
        'BARMAN_VERSION': version,
        'BARMAN_SERVER': 'test_server',
        'BARMAN_CONFIGURATION': 'build_config_from_dicts',
        'BARMAN_HOOK': 'test_hook',
        'BARMAN_SEGMENT': 'XXYYZZAABBCC',
        'BARMAN_FILE': '/somewhere',
        'BARMAN_SIZE': '1234567',
        'BARMAN_TIMESTAMP': str(now),
        'BARMAN_COMPRESSION': '',
        'BARMAN_RETRY': '0',
        'BARMAN_ERROR': 'BOOM!',
    }
    script.run()
    assert command_mock.call_count == 1
    assert command_mock.call_args[1]['env_append'] == expected_env
def test_retry(self, sleep_moc):
    """
    Test the retry_backup_copy method.

    :param sleep_moc: mimic the sleep timer
    """
    # BackupManager setup: 5 retries, 10 seconds apart
    backup_manager = build_backup_manager()
    backup_manager.config.basebackup_retry_times = 5
    backup_manager.config.basebackup_retry_sleep = 10
    f = Mock()
    # Immediate success: the callable result is passed through
    r = backup_manager.retry_backup_copy(f, 'test string')
    f.assert_called_with('test string')
    assert f.return_value == r
    # One failure then success: two invocations, and the successful
    # result is returned
    expected = Mock()
    f = Mock(side_effect=[DataTransferFailure('testException'), expected])
    r = backup_manager.retry_backup_copy(f, 'test string')
    assert f.call_count == 2
    # BUG FIX: 'expected' was built but never checked against the result
    assert r is expected
    # Permanent failure: 1 try + 5 retries, one sleep per retry,
    # then the exception propagates
    sleep_moc.reset_mock()
    e = DataTransferFailure('testException')
    f = Mock(side_effect=[e, e, e, e, e, e])
    with pytest.raises(DataTransferFailure):
        backup_manager.retry_backup_copy(f, 'test string')
    assert sleep_moc.call_count == 5
    assert f.call_count == 6
def test_keyboard_interrupt(self, mock_infofile):
    """
    Exception handling during backup operations.

    Case 1: a generic exception raised while the backup runs must leave
    the backup in FAILED status.
    Case 2: a KeyboardInterrupt (user pressing CTRL+C during a backup)
    must also leave the backup in FAILED status.
    """
    backup_manager = build_backup_manager()
    instance = mock_infofile.return_value
    # Case 1: generic exception from start_backup
    backup_manager.executor.start_backup = Mock(
        side_effect=Exception('abc'))
    backup_manager.backup()
    assert call.set_attribute('status', 'FAILED') in instance.mock_calls
    # Case 2: KeyboardInterrupt from start_backup
    backup_manager.executor.start_backup = Mock(
        side_effect=KeyboardInterrupt())
    backup_manager.backup()
    assert call.set_attribute('status', 'FAILED') in instance.mock_calls
def test_load_backup_cache(self, tmpdir):
    """
    Loading of the on-disk backups into the backup cache.
    """
    backup_manager = build_backup_manager(
        name='TestServer',
        global_conf={'barman_home': tmpdir.strpath})
    # The cache starts uninitialized
    assert backup_manager._backup_cache is None
    # Persist a DONE backup on disk
    b_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
    )
    b_info.save()
    # After an explicit load, the backup must be in the cache
    backup_manager._load_backup_cache()
    assert backup_manager._backup_cache[b_info.backup_id].to_dict() == \
        b_info.to_dict()
def test_concurrent_stop_backup(self, label_mock, stop_mock,):
    """
    Basic test for the concurrent stop_backup method

    :param label_mock: mimic the response of _write_backup_label
    :param stop_mock: mimic the response of _pgespresso_stop_backup
    """
    # Server mock configured for concurrent backups
    server = build_mocked_server(
        main_conf={'backup_options': BackupOptions.CONCURRENT_BACKUP})
    backup_manager = build_backup_manager(server=server)
    # _pgespresso_stop_backup returns the closing WAL and the stop time
    stop_time = datetime.datetime.now()
    stop_mock.return_value = ("000000060000A25700000044", stop_time)
    backup_info = build_test_backup_info()
    backup_manager.executor.strategy.stop_backup(backup_info)
    # The end position is the first byte of the following segment
    assert backup_info.end_xlog == 'A257/45000000'
    assert backup_info.end_wal == '000000060000A25700000044'
    assert backup_info.end_offset == 0
    assert backup_info.end_time == stop_time
def test_get_next_batch(self, from_file_mock, isfile_mock, glob_mock):
    """
    Test the FileWalArchiver.get_next_batch method
    """
    # Hack: instead of real WalFileInfo objects we use plain strings
    # (the WAL name wrapped in colons, e.g. ":000000010000000000000001:")
    # to keep comparisons trivial
    glob_mock.return_value = ['000000010000000000000001']
    isfile_mock.return_value = True
    from_file_mock.side_effect = lambda wal_name: ':%s:' % wal_name
    backup_manager = build_backup_manager(name='TestServer')
    archiver = FileWalArchiver(backup_manager)
    backup_manager.server.archivers = [archiver]
    # A well-formed WAL name ends up in the batch
    batch = archiver.get_next_batch()
    assert [':000000010000000000000001:'] == batch
    # A malformed WAL name ends up among the batch errors
    wrong_file_name = 'test_wrong_wal_file.2'
    glob_mock.return_value = [wrong_file_name]
    batch = archiver.get_next_batch()
    assert [wrong_file_name] == batch.errors
def test_pgespresso_start_backup(self):
    """
    Simple test for the _pgespresso_start_backup method
    """
    # Server mock configured for concurrent backups
    server = build_mocked_server(
        main_conf={'backup_options': BackupOptions.CONCURRENT_BACKUP})
    backup_manager = build_backup_manager(server=server)
    backup_label = 'test label'
    # Without pgespresso installed the call must raise
    backup_manager.server.pgespresso_installed.return_value = False
    with pytest.raises(Exception):
        backup_manager.executor.pgespresso_start_backup(backup_label)
    # With pgespresso installed, verify the exact call sequence
    # performed on the server connection
    backup_manager.server.reset_mock()
    backup_manager.executor.server.pgespresso_installed.return_value = True
    backup_manager.executor.strategy._pgespresso_start_backup(backup_label)
    pg_connect = mock.call.pg_connect()
    with_pg_connect = pg_connect.__enter__()
    cursor = with_pg_connect.cursor()
    assert backup_manager.server.mock_calls == [
        pg_connect,
        with_pg_connect,
        pg_connect.__enter__().rollback(),
        cursor,
        cursor.execute(ANY, ANY),
        cursor.fetchone(),
        pg_connect.__exit__(None, None, None)]
def test_exclusive_stop_backup(self, stop_mock):
    """
    Basic test for the exclusive stop_backup method

    :param stop_mock: mimic the response of _pg_stop_backup
    """
    # Server mock configured for exclusive backups
    server = build_mocked_server(
        main_conf={'backup_options': BackupOptions.EXCLUSIVE_BACKUP})
    backup_manager = build_backup_manager(server=server)
    # _pg_stop_backup returns xlog position, WAL name, offset and time
    stop_time = datetime.datetime.now()
    stop_mock.return_value = ("266/4A9C1EF8",
                              "00000010000002660000004A",
                              10231544,
                              stop_time)
    backup_info = build_test_backup_info()
    backup_manager.executor.strategy.stop_backup(backup_info)
    # The returned values must be stored inside the BackupInfo object
    assert backup_info.end_xlog == '266/4A9C1EF8'
    assert backup_info.end_wal == '00000010000002660000004A'
    assert backup_info.end_offset == 10231544
    assert backup_info.end_time == stop_time
def test_backup_copy(self, rsync_mock, tmpdir):
    """
    Test the execution of a rsync copy

    :param rsync_mock: mock for the rsync command
    :param tmpdir: temporary dir
    """
    backup_manager = build_backup_manager(global_conf={
        'barman_home': tmpdir.mkdir('home').strpath
    })
    backup_info = build_test_backup_info(
        server=backup_manager.server,
        pgdata="/pg/data",
        config_file="/etc/postgresql.conf",
        hba_file="/pg/data/pg_hba.conf",
        ident_file="/pg/data/pg_ident.conf",
        begin_xlog="0/2000028",
        begin_wal="000000010000000000000002",
        begin_offset=28)
    backup_info.save()
    # This is to check that all the preparation is done correctly
    assert os.path.exists(backup_info.filename)

    backup_manager.executor.backup_copy(backup_info)

    # Expected copy sequence: one rsync + smart_copy per tablespace,
    # then PGDATA (excluding and protecting the tablespace mount
    # points), then pg_control and the external configuration file.
    assert rsync_mock.mock_calls == [
        mock.call(check=True, network_compression=False, args=[],
                  bwlimit=None, ssh='ssh',
                  ssh_options=['-c', '"arcfour"', '-p', '22',
                               '*****@*****.**', '-o', 'BatchMode=yes',
                               '-o', 'StrictHostKeyChecking=no']),
        mock.call().smart_copy(':/fake/location/',
                               backup_info.get_data_directory(16387),
                               None, None),
        mock.call(check=True, network_compression=False, args=[],
                  bwlimit=None, ssh='ssh',
                  ssh_options=['-c', '"arcfour"', '-p', '22',
                               '*****@*****.**', '-o', 'BatchMode=yes',
                               '-o', 'StrictHostKeyChecking=no']),
        mock.call().smart_copy(':/another/location/',
                               backup_info.get_data_directory(16405),
                               None, None),
        mock.call(network_compression=False,
                  exclude_and_protect=['/pg_tblspc/16387',
                                       '/pg_tblspc/16405'],
                  args=[], bwlimit=None, ssh='ssh',
                  ssh_options=['-c', '"arcfour"', '-p', '22',
                               '*****@*****.**', '-o', 'BatchMode=yes',
                               '-o', 'StrictHostKeyChecking=no']),
        mock.call().smart_copy(':/pg/data/',
                               backup_info.get_data_directory(),
                               None, None),
        mock.call()(
            ':/pg/data/global/pg_control',
            '%s/global/pg_control' % backup_info.get_data_directory()),
        mock.call()(':/etc/postgresql.conf',
                    backup_info.get_data_directory())]
def test_backup_copy_with_included_files(self, rsync_moc, tmpdir, capsys):
    """Included configuration files must raise a warning on stderr."""
    backup_manager = build_backup_manager(global_conf={
        'barman_home': tmpdir.mkdir('home').strpath
    })
    # A backup info listing an additional included configuration file
    backup_info = build_test_backup_info(
        server=backup_manager.server,
        pgdata="/pg/data",
        config_file="/etc/postgresql.conf",
        hba_file="/pg/data/pg_hba.conf",
        ident_file="/pg/data/pg_ident.conf",
        begin_xlog="0/2000028",
        begin_wal="000000010000000000000002",
        included_files=["/tmp/config/file.conf"],
        begin_offset=28)
    backup_info.save()
    # Check that all the preparation is done correctly
    assert os.path.exists(backup_info.filename)
    # Execute a backup
    backup_manager.executor.backup_copy(backup_info)
    out, err = capsys.readouterr()
    # Both the warning and the offending file must be on stderr
    assert "WARNING: The usage of include directives is not supported" \
        in err
    assert backup_info.included_files[0] in err
def test_map_temporary_config_files(self, tmpdir):
    """
    Test the method that prepares configuration files
    for the final steps of a recovery
    """
    # Build the directory/file layout used by the test
    tempdir = tmpdir.mkdir("tempdir")
    recovery_info = {
        "configuration_files": ["postgresql.conf", "postgresql.auto.conf"],
        "tempdir": tempdir.strpath,
        "temporary_configuration_files": [],
        "results": {"changes": [], "warnings": [], "missing_files": []},
    }
    backup_info = testing_helpers.build_test_backup_info()
    backup_info.config.basebackups_directory = tmpdir.strpath
    datadir = tmpdir.mkdir(backup_info.backup_id).mkdir("data")
    postgresql_conf_local = datadir.join("postgresql.conf")
    postgresql_auto_local = datadir.join("postgresql.auto.conf")
    postgresql_conf_local.write("archive_command = something\n"
                                "data_directory = something")
    postgresql_auto_local.write("archive_command = something\n"
                                "data_directory = something")
    # RecoveryExecutor backed by mocked server and backup manager
    backup_manager = testing_helpers.build_backup_manager()
    executor = RecoveryExecutor(backup_manager)
    executor._map_temporary_config_files(
        recovery_info, backup_info, "ssh@something")
    # The configuration files must have been copied into tempdir...
    assert tempdir.join("postgresql.conf").check()
    assert tempdir.join("postgresql.conf").computehash() == \
        postgresql_conf_local.computehash()
    assert tempdir.join("postgresql.auto.conf").check()
    assert tempdir.join("postgresql.auto.conf").computehash() == \
        postgresql_auto_local.computehash()
    # ...and the absent ones reported as missing
    assert recovery_info["results"]["missing_files"] == [
        "pg_hba.conf", "pg_ident.conf"]
def test_set_pitr_targets(self, tmpdir):
    """
    Evaluate targets for point in time recovery
    """
    # Basic folder/file structure
    tempdir = tmpdir.mkdir("temp_dir")
    dest = tmpdir.mkdir("dest")
    wal_dest = tmpdir.mkdir("wal_dest")
    recovery_info = {
        "configuration_files": ["postgresql.conf", "postgresql.auto.conf"],
        "tempdir": tempdir.strpath,
        "results": {"changes": [], "warnings": []},
        "is_pitr": False,
        "wal_dest": wal_dest.strpath,
        "get_wal": False,
    }
    backup_info = testing_helpers.build_test_backup_info()
    backup_manager = testing_helpers.build_backup_manager()
    executor = RecoveryExecutor(backup_manager)
    # With empty target values no PITR is requested
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath, "", "", "", "")
    assert recovery_info["target_epoch"] is None
    assert recovery_info["target_datetime"] is None
    assert recovery_info["wal_dest"] == wal_dest.strpath
    # With PITR targets the time values are parsed and the WAL
    # destination moves inside the recovery destination directory
    executor._set_pitr_targets(
        recovery_info, backup_info, dest.strpath,
        "target_name", "2015-06-03 16:11:03.71038+02", "2", None)
    target_datetime = dateutil.parser.parse(
        "2015-06-03 16:11:03.710380+02:00")
    target_epoch = (time.mktime(target_datetime.timetuple()) +
                    (target_datetime.microsecond / 1000000.0))
    assert recovery_info["target_datetime"] == target_datetime
    assert recovery_info["target_epoch"] == target_epoch
    assert recovery_info["wal_dest"] == dest.join("barman_xlog").strpath
def test_pgespresso_stop_backup(self, tbs_map_mock, label_mock):
    """
    Basic test for the pgespresso_stop_backup method
    """
    # Server mock configured for concurrent backups
    server = build_mocked_server(
        main_conf={'backup_options': BackupOptions.CONCURRENT_BACKUP})
    backup_manager = build_backup_manager(server=server)
    # pgespresso_stop_backup returns the last WAL and the stop time
    stop_time = datetime.datetime.now()
    server.postgres.server_version = 90500
    server.postgres.pgespresso_stop_backup.return_value = {
        'end_wal': "000000060000A25700000044",
        'timestamp': stop_time
    }
    backup_info = build_test_backup_info(timeline=6)
    backup_manager.executor.strategy.stop_backup(backup_info)
    # The end position is the last byte of the returned segment
    assert backup_info.end_xlog == 'A257/44FFFFFF'
    assert backup_info.end_wal == '000000060000A25700000044'
    assert backup_info.end_offset == 0xFFFFFF
    assert backup_info.end_time == stop_time
def test_setup(self, rsync_mock):
    """
    Test the method that sets up a recovery
    """
    backup_info = testing_helpers.build_test_backup_info()
    backup_manager = testing_helpers.build_backup_manager()
    executor = RecoveryExecutor(backup_manager)
    backup_info.version = 90300
    # _setup must create a temporary directory, _teardown must delete it
    ret = executor._setup(backup_info, None, "/tmp")
    assert os.path.exists(ret["tempdir"])
    executor._teardown(ret)
    assert not os.path.exists(ret["tempdir"])
    # PostgreSQL 9.3: postgresql.auto.conf is not handled
    ret = executor._setup(backup_info, None, "/tmp")
    executor._teardown(ret)
    assert "postgresql.auto.conf" not in ret["configuration_files"]
    # PostgreSQL 9.4: postgresql.auto.conf is handled
    backup_info.version = 90400
    ret = executor._setup(backup_info, None, "/tmp")
    executor._teardown(ret)
    assert "postgresql.auto.conf" in ret["configuration_files"]
    # An invalid remote command must abort with SystemExit
    with pytest.raises(SystemExit):
        executor.server.path = None
        executor._setup(backup_info, "invalid", "/tmp")
def test_archive_wal_no_backup(self, tmpdir, capsys):
    """
    Test archive-wal behaviour when there are no backups: incoming
    WAL files are expected to be trashed.
    """
    # Real backup manager over a temporary barman home
    backup_manager = build_backup_manager(
        name="TestServer",
        global_conf={"barman_home": tmpdir.strpath})
    backup_manager.compression_manager.get_compressor.return_value = None
    backup_manager.server.get_backup.return_value = None
    # Basic folder structure: one incoming WAL plus an empty catalog
    basedir = tmpdir.join("main")
    incoming_dir = basedir.join("incoming")
    archive_dir = basedir.join("wals")
    xlog_db = archive_dir.join("xlog.db")
    wal_name = "000000010000000000000001"
    wal_file = incoming_dir.join(wal_name)
    wal_file.ensure()
    archive_dir.ensure(dir=True)
    xlog_db.ensure()
    backup_manager.server.xlogdb.return_value.__enter__.return_value = \
        xlog_db.open(mode="a")
    backup_manager.server.archivers = [FileWalArchiver(backup_manager)]
    backup_manager.archive_wal()
    # The WAL must not be recorded inside the xlog catalog...
    with xlog_db.open() as f:
        line = str(f.readline())
    assert wal_name not in line
    # ...nor archived on disk...
    wal_path = os.path.join(archive_dir.strpath,
                            barman.xlog.hash_dir(wal_name),
                            wal_name)
    assert not os.path.exists(wal_path)
    # ...and its removal must be reported to the user
    out, err = capsys.readouterr()
    assert ("No base backup available."
            " Trashing file %s" % wal_name) in out
def test_get_backup(self, tmpdir):
    """
    Test the get_backup method, which relies on the backups cache
    to retrieve a backup by id.
    """
    backup_manager = build_backup_manager(
        name='TestServer',
        global_conf={'barman_home': tmpdir.strpath})
    # A DONE backup persisted on disk
    b_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
    )
    b_info.save()
    assert backup_manager._backup_cache is None
    # The backup is retrieved (the cache is loaded from disk on demand)
    assert backup_manager.get_backup(b_info.backup_id).to_dict() == \
        b_info.to_dict()
    # With an emptied cache no backup can be found
    backup_manager._backup_cache = {}
    assert backup_manager.get_backup(b_info.backup_id) is None
def test_check_receivexlog_installed(self, command_mock):
    """
    Test for the check method of the StreamingWalArchiver class
    """
    backup_manager = build_backup_manager()
    backup_manager.server.postgres.server_txt_version = "9.2"
    # Case 1: invoking pg_receivexlog fails outright
    command_mock.side_effect = CommandFailedException
    archiver = StreamingWalArchiver(backup_manager)
    remote_status = archiver.get_remote_status()
    assert remote_status == {
        "pg_receivexlog_installed": False,
        "pg_receivexlog_path": None,
        "pg_receivexlog_compatible": None,
        'pg_receivexlog_synchronous': None,
        "pg_receivexlog_version": None,
        "pg_receivexlog_supports_slots": None,
    }
    # Case 2: the binary is found but its execution fails, so the
    # version/compatibility fields stay undetermined
    backup_manager.server.postgres.server_txt_version = "9.2"
    command_mock.side_effect = None
    command_mock.return_value.cmd = '/some/path/to/pg_receivexlog'
    command_mock.return_value.side_effect = CommandFailedException
    archiver.reset_remote_status()
    remote_status = archiver.get_remote_status()
    assert remote_status == {
        "pg_receivexlog_installed": True,
        "pg_receivexlog_path": "/some/path/to/pg_receivexlog",
        "pg_receivexlog_compatible": None,
        'pg_receivexlog_synchronous': None,
        "pg_receivexlog_version": None,
        "pg_receivexlog_supports_slots": None,
    }
def test_rsync_backup_executor_init(self):
    """A RecoveryExecutor can be built from a backup manager."""
    manager = testing_helpers.build_backup_manager()
    assert RecoveryExecutor(manager)
def test_check(self, remote_mock, capsys):
    """
    Test management of check_postgres view output

    :param remote_mock: mock get_remote_status function
    :param capsys: retrieve output from console
    """
    backup_manager = build_backup_manager()
    # The streaming connection reports PostgreSQL 9.5
    streaming = backup_manager.server.streaming
    streaming.server_txt_version = "9.5"
    archiver = StreamingWalArchiver(backup_manager)
    strategy = CheckOutputStrategy()
    # Case: correct configuration -> every check is OK
    remote_mock.return_value = {
        "pg_receivexlog_installed": True,
        "pg_receivexlog_compatible": True,
        "pg_receivexlog_path": "fake/path",
    }
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == ("\tpg_receivexlog: OK\n"
                   "\tpg_receivexlog compatible: OK\n")
    # Case: pg_receivexlog not compatible -> FAILED with both versions
    remote_mock.return_value = {
        "pg_receivexlog_installed": True,
        "pg_receivexlog_compatible": False,
        "pg_receivexlog_path": "fake/path",
        "pg_receivexlog_version": "9.2",
    }
    strategy = CheckOutputStrategy()
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == (
        "\tpg_receivexlog: OK\n"
        "\tpg_receivexlog compatible: FAILED "
        "(PostgreSQL version: 9.5, pg_receivexlog version: 9.2)\n")
    # Case: pg_receivexlog returned an error -> FAILED with None version
    remote_mock.return_value = {
        "pg_receivexlog_installed": True,
        "pg_receivexlog_compatible": None,
        "pg_receivexlog_path": "fake/path",
        "pg_receivexlog_version": None,
    }
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == (
        "\tpg_receivexlog: OK\n"
        "\tpg_receivexlog compatible: FAILED "
        "(PostgreSQL version: 9.5, pg_receivexlog version: None)\n")
def test_exclusive_start_backup(self):
    """
    Basic test for the exclusive start_backup method
    """
    # Server mock configured for exclusive backups
    server = build_mocked_server(
        main_conf={'backup_options': BackupOptions.EXCLUSIVE_BACKUP})
    backup_manager = build_backup_manager(server=server)
    # get_setting('data_directory') response
    backup_manager.server.postgres.get_setting.return_value = '/pg/data'
    # get_configuration_files() response
    server.postgres.get_configuration_files.return_value = dict(
        config_file="/etc/postgresql.conf",
        hba_file="/pg/pg_hba.conf",
        ident_file="/pg/pg_ident.conf",
    )
    # get_tablespaces() response
    tablespaces = [Tablespace._make(('test_tbs', 1234, '/tbs/test'))]
    server.postgres.get_tablespaces.return_value = tablespaces
    # start_exclusive_backup() response
    start_time = datetime.datetime.now()
    server.postgres.start_exclusive_backup.return_value = (
        "A257/44B4C0D8",
        "000000060000A25700000044",
        11845848,
        start_time)
    # Start the backup on an empty backup info
    backup_info = BackupInfo(server=backup_manager.server,
                             backup_id='fake_id')
    backup_manager.executor.strategy.start_backup(backup_info)
    # All the values returned by the mocks must be stored inside
    # the BackupInfo object
    assert backup_info.pgdata == '/pg/data'
    assert backup_info.config_file == "/etc/postgresql.conf"
    assert backup_info.hba_file == "/pg/pg_hba.conf"
    assert backup_info.ident_file == "/pg/pg_ident.conf"
    assert backup_info.tablespaces == tablespaces
    assert backup_info.status == 'STARTED'
    assert backup_info.timeline == 6
    assert backup_info.begin_xlog == 'A257/44B4C0D8'
    assert backup_info.begin_wal == '000000060000A25700000044'
    assert backup_info.begin_offset == 11845848
    assert backup_info.begin_time == start_time
    # pg_start_backup must have been invoked with the standard label
    server.postgres.start_exclusive_backup.assert_called_with(
        'Barman backup main fake_id')
def test_pgespresso_start_backup(self): """ Test concurrent backup using pgespresso """ # Test: start concurrent backup # Build a backup_manager using a mocked server server = build_mocked_server(main_conf={ 'backup_options': BackupOptions.CONCURRENT_BACKUP }) backup_manager = build_backup_manager(server=server) # Mock server.get_pg_setting('data_directory') call backup_manager.server.postgres.get_setting.return_value = '/pg/data' # Mock server.get_pg_configuration_files() call server.postgres.get_configuration_files.return_value = dict( config_file="/etc/postgresql.conf", hba_file="/pg/pg_hba.conf", ident_file="/pg/pg_ident.conf", ) # Mock server.get_pg_tablespaces() call tablespaces = [Tablespace._make(('test_tbs', 1234, '/tbs/test'))] server.postgres.get_tablespaces.return_value = tablespaces server.postgres.server_version = 90500 # Mock executor._pgespresso_start_backup(label) call start_time = datetime.datetime.now(tz.tzlocal()).replace(microsecond=0) server.postgres.pgespresso_start_backup.return_value = { 'backup_label': "START WAL LOCATION: 266/4A9C1EF8 " "(file 00000010000002660000004A)\n" "START TIME: %s" % start_time.strftime('%Y-%m-%d %H:%M:%S %Z'), } # Build a test empty backup info backup_info = BackupInfo(server=backup_manager.server, backup_id='fake_id2') backup_manager.executor.strategy.start_backup(backup_info) # Check that all the values are correctly saved inside the BackupInfo assert backup_info.pgdata == '/pg/data' assert backup_info.config_file == "/etc/postgresql.conf" assert backup_info.hba_file == "/pg/pg_hba.conf" assert backup_info.ident_file == "/pg/pg_ident.conf" assert backup_info.tablespaces == tablespaces assert backup_info.status == 'STARTED' assert backup_info.timeline == 16 assert backup_info.begin_xlog == '266/4A9C1EF8' assert backup_info.begin_wal == '00000010000002660000004A' assert backup_info.begin_offset == 10231544 assert backup_info.begin_time == start_time # Check that the correct call to pg_start_backup has been made 
server.postgres.pgespresso_start_backup.assert_called_with( 'Barman backup main fake_id2')
def test_check(self, remote_mock, capsys):
    """
    Test management of check_postgres view output

    :param remote_mock: mock get_remote_status function
    :param capsys: retrieve output from console
    """
    # Create a backup_manager
    backup_manager = build_backup_manager()
    # Set up mock responses
    postgres = backup_manager.server.postgres
    postgres.server_version = 90501
    # Instantiate a FileWalArchiver obj
    archiver = FileWalArchiver(backup_manager)
    # Prepare the output check strategy
    strategy = CheckOutputStrategy()
    # Case: no reply by PostgreSQL
    remote_mock.return_value = {
        'archive_mode': None,
        'archive_command': None,
    }
    # Expect no output from check
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == ''
    # Case: correct configuration
    remote_mock.return_value = {
        'archive_mode': 'on',
        'archive_command': 'wal to archive',
        'is_archiving': True,
        'incoming_wals_count': 0,
    }
    # Expect out: all parameters: OK
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tarchive_mode: OK\n" \
        "\tarchive_command: OK\n" \
        "\tcontinuous archiving: OK\n"
    # Case: archive_command value is not acceptable
    remote_mock.return_value = {
        'archive_command': None,
        'archive_mode': 'on',
        'is_archiving': False,
        'incoming_wals_count': 0,
    }
    # Expect out: some parameters: FAILED
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tarchive_mode: OK\n" \
        "\tarchive_command: FAILED " \
        "(please set it accordingly to documentation)\n"
    # Case: all but is_archiving ok
    remote_mock.return_value = {
        'archive_mode': 'on',
        'archive_command': 'wal to archive',
        'is_archiving': False,
        'incoming_wals_count': 0,
    }
    # Expect out: continuous archiving: FAILED
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tarchive_mode: OK\n" \
        "\tarchive_command: OK\n" \
        "\tcontinuous archiving: FAILED\n"
    # Case: too many wal files in the incoming queue
    archiver.config.max_incoming_wals_queue = 10
    remote_mock.return_value = {
        'archive_mode': 'on',
        'archive_command': 'wal to archive',
        'is_archiving': False,
        'incoming_wals_count': 20,
    }
    # Expect out: the wals incoming queue is too big
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    # NOTE(review): the original assertion ended with a dangling "\"
    # line continuation — the expected line reporting the oversized
    # incoming WALs queue appears to have been lost; restore it from
    # the upstream test suite and extend this expected string.
    assert out == \
        "\tarchive_mode: OK\n" \
        "\tarchive_command: OK\n" \
        "\tcontinuous archiving: FAILED\n"
def test_backup_copy(self, rsync_mock, tmpdir):
    """
    Test the execution of a rsync copy

    :param rsync_mock: mock for the RsyncCopyController object
    :param tmpdir: temporary dir
    """
    backup_manager = build_backup_manager(global_conf={
        'barman_home': tmpdir.mkdir('home').strpath
    })
    backup_manager.server.path = None
    backup_info = build_test_backup_info(
        server=backup_manager.server,
        pgdata="/pg/data",
        config_file="/etc/postgresql.conf",
        hba_file="/pg/data/pg_hba.conf",
        ident_file="/pg/data/pg_ident.conf",
        begin_xlog="0/2000028",
        begin_wal="000000010000000000000002",
        begin_offset=28)
    backup_info.save()
    # This is to check that all the preparation is done correctly
    assert os.path.exists(backup_info.filename)

    backup_manager.executor.backup_copy(backup_info)

    # The controller must be created once and fed with the two
    # tablespaces, the PGDATA directory, pg_control and the config
    # file, in this exact order, before the copy is started.
    assert rsync_mock.mock_calls == [
        mock.call(reuse_backup=None, safe_horizon=None,
                  network_compression=False,
                  ssh_command='ssh', path=None,
                  ssh_options=['-c', '"arcfour"', '-p', '22',
                               '*****@*****.**', '-o',
                               'BatchMode=yes', '-o',
                               'StrictHostKeyChecking=no'],
                  retry_sleep=30, retry_times=0),
        mock.call().add_directory(
            label='tbs1',
            src=':/fake/location/',
            dst=backup_info.get_data_directory(16387),
            reuse=None,
            bwlimit=None,
            item_class=rsync_mock.return_value.TABLESPACE_CLASS),
        mock.call().add_directory(
            label='tbs2',
            src=':/another/location/',
            dst=backup_info.get_data_directory(16405),
            reuse=None,
            bwlimit=None,
            item_class=rsync_mock.return_value.TABLESPACE_CLASS),
        mock.call().add_directory(
            label='pgdata',
            src=':/pg/data/',
            dst=backup_info.get_data_directory(),
            reuse=None,
            bwlimit=None,
            item_class=rsync_mock.return_value.PGDATA_CLASS,
            exclude=['/pg_xlog/*', '/pg_log/*',
                     '/recovery.conf', '/postmaster.pid'],
            exclude_and_protect=['pg_tblspc/16387', 'pg_tblspc/16405']),
        mock.call().add_file(
            label='pg_control',
            src=':/pg/data/global/pg_control',
            dst='%s/global/pg_control' % backup_info.get_data_directory(),
            item_class=rsync_mock.return_value.PGCONTROL_CLASS),
        mock.call().add_file(
            label='config_file',
            src=':/etc/postgresql.conf',
            dst=backup_info.get_data_directory(),
            item_class=rsync_mock.return_value.CONFIG_CLASS,
            optional=False),
        mock.call().copy(),
    ]
def test_get_next_batch(self, from_file_mock, isfile_mock, exists_mock,
                        glob_mock, caplog):
    """
    Test the StreamingWalArchiver.get_next_batch method
    """
    # WAL batch, with 000000010000000000000001 that is currently being
    # written
    glob_mock.return_value = ['000000010000000000000001']
    isfile_mock.return_value = True
    # This is an hack, instead of a WalFileInfo we use a simple string to
    # ease all the comparisons. The resulting string is the name enclosed
    # in colons. e.g. ":000000010000000000000001:"
    from_file_mock.side_effect = lambda wal_name, compression: (
        ':%s:' % wal_name)

    backup_manager = build_backup_manager(name='TestServer')
    archiver = StreamingWalArchiver(backup_manager)
    backup_manager.server.archivers = [archiver]

    caplog_reset(caplog)
    batch = archiver.get_next_batch()
    assert ['000000010000000000000001'] == batch.skip
    assert '' == caplog.text

    # WAL batch, with 000000010000000000000002 that is currently being
    # written and 000000010000000000000001 can be archived
    caplog_reset(caplog)
    glob_mock.return_value = [
        '000000010000000000000001',
        '000000010000000000000002',
    ]
    batch = archiver.get_next_batch()
    assert [':000000010000000000000001:'] == batch
    assert ['000000010000000000000002'] == batch.skip
    assert '' == caplog.text

    # WAL batch, with two partial files.
    caplog_reset(caplog)
    glob_mock.return_value = [
        '000000010000000000000001.partial',
        '000000010000000000000002.partial',
    ]
    batch = archiver.get_next_batch()
    assert [':000000010000000000000001.partial:'] == batch
    assert ['000000010000000000000002.partial'] == batch.skip
    assert ('Archiving partial files for server %s: '
            '000000010000000000000001.partial' %
            archiver.config.name) in caplog.text

    # WAL batch, with history files.
    caplog_reset(caplog)
    glob_mock.return_value = [
        '00000001.history',
        '000000010000000000000002.partial',
    ]
    batch = archiver.get_next_batch()
    assert [':00000001.history:'] == batch
    assert ['000000010000000000000002.partial'] == batch.skip
    assert '' == caplog.text

    # WAL batch with errors
    wrong_file_name = 'test_wrong_wal_file.2'
    glob_mock.return_value = ['test_wrong_wal_file.2']
    batch = archiver.get_next_batch()
    assert [wrong_file_name] == batch.errors

    # WAL batch, with two partial files, but one has been just renamed.
    caplog_reset(caplog)
    exists_mock.side_effect = [False, True]
    glob_mock.return_value = [
        '000000010000000000000001.partial',
        '000000010000000000000002.partial',
    ]
    batch = archiver.get_next_batch()
    assert len(batch) == 0
    assert ['000000010000000000000002.partial'] == batch.skip
    # NOTE(review): '' in caplog.text is vacuously true; presumably
    # this was meant to be an equality check — confirm against the
    # upstream test suite.
    assert '' in caplog.text
def test_check(self, remote_mock, capsys):
    """
    Test management of check_postgres view output

    :param remote_mock: mock get_remote_status function
    :param capsys: retrieve output from console
    """
    # Create a backup_manager
    backup_manager = build_backup_manager()
    # Set up mock responses
    streaming = backup_manager.server.streaming
    streaming.server_txt_version = '9.5'
    # Instantiate a StreamingWalArchiver obj
    archiver = StreamingWalArchiver(backup_manager)
    # Prepare the output check strategy
    strategy = CheckOutputStrategy()
    # Case: correct configuration, but receive-wal is not running
    remote_mock.return_value = {
        'pg_receivexlog_installed': True,
        'pg_receivexlog_compatible': True,
        'pg_receivexlog_path': 'fake/path',
        'incoming_wals_count': 0,
    }
    backup_manager.server.process_manager.list.return_value = []
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tpg_receivexlog: OK\n" \
        "\tpg_receivexlog compatible: OK\n" \
        "\treceive-wal running: FAILED " \
        "(See the Barman log file for more details)\n"
    # Case: pg_receivexlog is not compatible
    remote_mock.return_value = {
        'pg_receivexlog_installed': True,
        'pg_receivexlog_compatible': False,
        'pg_receivexlog_path': 'fake/path',
        'pg_receivexlog_version': '9.2',
        'incoming_wals_count': 0,
    }
    # Expect out: some parameters: FAILED
    strategy = CheckOutputStrategy()
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tpg_receivexlog: OK\n" \
        "\tpg_receivexlog compatible: FAILED " \
        "(PostgreSQL version: 9.5, pg_receivexlog version: 9.2)\n" \
        "\treceive-wal running: FAILED " \
        "(See the Barman log file for more details)\n"
    # Case: pg_receivexlog returned error
    remote_mock.return_value = {
        'pg_receivexlog_installed': True,
        'pg_receivexlog_compatible': None,
        'pg_receivexlog_path': 'fake/path',
        'pg_receivexlog_version': None,
        'incoming_wals_count': 0,
    }
    # Expect out: compatibility unknown, reported as FAILED
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tpg_receivexlog: OK\n" \
        "\tpg_receivexlog compatible: FAILED " \
        "(PostgreSQL version: 9.5, pg_receivexlog version: None)\n" \
        "\treceive-wal running: FAILED " \
        "(See the Barman log file for more details)\n"
    # Case: receive-wal running
    backup_manager.server.process_manager.list.return_value = [
        ProcessInfo(pid=1,
                    server_name=backup_manager.config.name,
                    task="receive-wal")
    ]
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tpg_receivexlog: OK\n" \
        "\tpg_receivexlog compatible: FAILED " \
        "(PostgreSQL version: 9.5, pg_receivexlog version: None)\n" \
        "\treceive-wal running: OK\n"
    # Case: streaming connection not configured
    backup_manager.server.streaming = None
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    assert out == \
        "\tpg_receivexlog: OK\n" \
        "\tpg_receivexlog compatible: FAILED " \
        "(PostgreSQL version: Unknown, pg_receivexlog version: None)\n" \
        "\treceive-wal running: OK\n"
    # Case: too many wal files in the incoming queue
    archiver.config.max_incoming_wals_queue = 10
    remote_mock.return_value = {
        'pg_receivexlog_installed': True,
        'pg_receivexlog_compatible': None,
        'pg_receivexlog_path': 'fake/path',
        'pg_receivexlog_version': None,
        'incoming_wals_count': 20,
    }
    # Expect out: the wals incoming queue is too big
    archiver.check(strategy)
    (out, err) = capsys.readouterr()
    # NOTE(review): the original assertion ended with a dangling "\"
    # line continuation — the expected line reporting the oversized
    # incoming WALs queue appears to have been lost; restore it from
    # the upstream test suite and extend this expected string.
    assert out == \
        "\tpg_receivexlog: OK\n" \
        "\tpg_receivexlog compatible: FAILED " \
        "(PostgreSQL version: Unknown, pg_receivexlog version: None)\n" \
        "\treceive-wal running: OK\n"
def test_receive_wal(self, receivexlog_mock, remote_mock, tmpdir):
    """
    Test the StreamingWalArchiver.receive_wal method: normal run,
    reset request, pre-9.4 pg_receivexlog, and every failure path
    that must raise ArchiverFailure.

    :param receivexlog_mock: mock for the PgReceiveXlog command object
    :param remote_mock: mock get_remote_status function
    :param tmpdir: temporary dir
    """
    backup_manager = build_backup_manager(
        main_conf={'backup_directory': tmpdir},
    )
    streaming_mock = backup_manager.server.streaming
    streaming_mock.server_txt_version = "9.4.0"
    streaming_mock.get_connection_string.return_value = (
        'host=pg01.nowhere user=postgres port=5432 '
        'application_name=barman_receive_wal')
    streaming_mock.get_remote_status.return_value = {
        "streaming_supported": True
    }
    backup_manager.server.streaming.conn_parameters = {
        'host': 'pg01.nowhere',
        'user': '******',
        'port': '5432',
    }
    streaming_dir = tmpdir.join('streaming')
    streaming_dir.ensure(dir=True)
    # Test: normal run
    archiver = StreamingWalArchiver(backup_manager)
    archiver.server.streaming.server_version = 90400
    remote_mock.return_value = {
        'pg_receivexlog_installed': True,
        'pg_receivexlog_compatible': True,
        'pg_receivexlog_synchronous': None,
        'pg_receivexlog_path': 'fake/path',
        'pg_receivexlog_supports_slots': True,
        'pg_receivexlog_version': '9.4',
    }

    # Test: execute a reset request (must remove any .partial file)
    partial = streaming_dir.join('test.partial')
    partial.ensure()
    archiver.receive_wal(reset=True)
    assert not partial.check()

    archiver.receive_wal(reset=False)
    receivexlog_mock.assert_called_once_with(
        app_name='barman_receive_wal',
        synchronous=None,
        connection=ANY,
        destination=streaming_dir.strpath,
        err_handler=ANY,
        out_handler=ANY,
        path=ANY,
        slot_name=None,
        command='fake/path',
        version='9.4')
    receivexlog_mock.return_value.execute.assert_called_once_with()

    # Test: pg_receivexlog from 9.2
    receivexlog_mock.reset_mock()
    remote_mock.return_value = {
        'pg_receivexlog_installed': True,
        'pg_receivexlog_compatible': True,
        'pg_receivexlog_synchronous': False,
        'pg_receivexlog_path': 'fake/path',
        'pg_receivexlog_supports_slots': False,
        'pg_receivexlog_version': '9.2',
    }
    archiver.receive_wal(reset=False)
    receivexlog_mock.assert_called_once_with(
        app_name='barman_receive_wal',
        synchronous=False,
        connection=ANY,
        destination=streaming_dir.strpath,
        err_handler=ANY,
        out_handler=ANY,
        path=ANY,
        command='fake/path',
        slot_name=None,
        version='9.2')
    receivexlog_mock.return_value.execute.assert_called_once_with()

    # Test: incompatible pg_receivexlog
    with pytest.raises(ArchiverFailure):
        remote_mock.return_value = {
            'pg_receivexlog_installed': True,
            'pg_receivexlog_compatible': False,
            'pg_receivexlog_supports_slots': False,
            'pg_receivexlog_synchronous': False,
            'pg_receivexlog_path': 'fake/path'
        }
        archiver.receive_wal()
    # Test: missing pg_receivexlog
    with pytest.raises(ArchiverFailure):
        remote_mock.return_value = {
            'pg_receivexlog_installed': False,
            'pg_receivexlog_compatible': True,
            'pg_receivexlog_supports_slots': False,
            'pg_receivexlog_synchronous': False,
            'pg_receivexlog_path': 'fake/path'
        }
        archiver.receive_wal()
    # Test: impossible to connect with streaming protocol
    with pytest.raises(ArchiverFailure):
        backup_manager.server.streaming.get_remote_status.return_value = {
            'streaming_supported': None
        }
        remote_mock.return_value = {
            'pg_receivexlog_installed': True,
            'pg_receivexlog_supports_slots': False,
            'pg_receivexlog_compatible': True,
            'pg_receivexlog_synchronous': False,
            'pg_receivexlog_path': 'fake/path'
        }
        archiver.receive_wal()
    # Test: PostgreSQL too old
    with pytest.raises(ArchiverFailure):
        backup_manager.server.streaming.get_remote_status.return_value = {
            'streaming_supported': False
        }
        remote_mock.return_value = {
            'pg_receivexlog_installed': True,
            'pg_receivexlog_compatible': True,
            'pg_receivexlog_synchronous': False,
            'pg_receivexlog_path': 'fake/path'
        }
        archiver.receive_wal()
    # Test: general failure executing pg_receivexlog
    with pytest.raises(ArchiverFailure):
        remote_mock.return_value = {
            'pg_receivexlog_installed': True,
            'pg_receivexlog_compatible': True,
            'pg_receivexlog_synchronous': False,
            'pg_receivexlog_path': 'fake/path'
        }
        receivexlog_mock.return_value.execute.side_effect = \
            CommandFailedException
        archiver.receive_wal()
def test_init(self):
    """A FileWalArchiver can be built from a plain backup manager."""
    manager = build_backup_manager()
    FileWalArchiver(manager)
def test_recovery_pre_script(self, command_mock):
    """
    Unit test specific for the execution of a pre recovery script.

    test case:
    simulate the execution of a pre recovery script, should return 0
    test the environment for the HookScriptRunner obj.
    test the name of the fake script, should be the same as the one in
    the mocked configuration
    """
    # BackupManager mock
    backup_manager = build_backup_manager(name="test_server")
    backup_manager.config.pre_recovery_script = "test_recovery_pre_script"
    backup_manager.get_previous_backup = MagicMock()
    backup_manager.get_previous_backup.side_effect = \
        UnknownBackupIdException()
    backup_manager.get_next_backup = MagicMock()
    backup_manager.get_next_backup.side_effect = \
        UnknownBackupIdException()
    # BackupInfo mock
    backup_info = MagicMock(name="backup_info")
    backup_info.get_basebackup_directory.return_value = "backup_directory"
    backup_info.backup_id = "123456789XYZ"
    backup_info.error = None
    backup_info.status = "OK"
    # Command mock executed by HookScriptRunner
    command_mock.return_value.return_value = 0
    # the actual test
    script = HookScriptRunner(backup_manager, "recovery_script", "pre")
    script.env_from_recover(
        backup_info,
        dest="fake_dest",
        tablespaces={
            "first": "/first/relocated",
            "second": "/another/location",
        },
        remote_command="ssh user@host",
        target_name="name",
        exclusive=True,
    )
    expected_env = {
        "BARMAN_PHASE": "pre",
        "BARMAN_VERSION": version,
        "BARMAN_SERVER": "test_server",
        "BARMAN_CONFIGURATION": "build_config_from_dicts",
        "BARMAN_HOOK": "recovery_script",
        "BARMAN_BACKUP_DIR": "backup_directory",
        "BARMAN_BACKUP_ID": "123456789XYZ",
        "BARMAN_ERROR": "",
        "BARMAN_STATUS": "OK",
        "BARMAN_PREVIOUS_ID": "",
        "BARMAN_NEXT_ID": "",
        "BARMAN_RETRY": "0",
        "BARMAN_DESTINATION_DIRECTORY": "fake_dest",
        "BARMAN_TABLESPACES": '{"first": "/first/relocated", '
                              '"second": "/another/location"}',
        "BARMAN_REMOTE_COMMAND": "ssh user@host",
        "BARMAN_RECOVER_OPTIONS": '{"exclusive": true, '
                                  '"target_name": "name"}',
    }
    assert script.run() == 0
    assert command_mock.call_count == 1
    assert command_mock.call_args[1]["env_append"] == expected_env
    assert script.script == backup_manager.config.pre_recovery_script
def test_init(self):
    """A StreamingWalArchiver can be built from a plain backup manager."""
    manager = build_backup_manager()
    StreamingWalArchiver(manager)
def test_get_latest_archived_wals_info(self, tmpdir):
    """
    Test the get_latest_archived_wals_info method
    """
    # build a backup_manager and setup a basic configuration
    backup_manager = build_backup_manager(
        main_conf={
            'backup_directory': tmpdir.strpath,
        })

    # Test: nonexistent wals directory
    assert backup_manager.get_latest_archived_wals_info() == dict()
    # Test: empty wals directory
    wals = tmpdir.join('wals').ensure(dir=True)
    assert backup_manager.get_latest_archived_wals_info() == dict()
    # Test: ignore WAL-like files in the root
    wals.join('000000010000000000000003').ensure()
    assert backup_manager.get_latest_archived_wals_info() == dict()
    # Test: find the first WAL
    wals.join('0000000100000000').join('000000010000000000000001').ensure()
    latest = backup_manager.get_latest_archived_wals_info()
    assert latest
    assert len(latest) == 1
    assert latest['00000001'].name == '000000010000000000000001'
    # Test: find the 2nd WAL in the same dir
    wals.join('0000000100000000').join('000000010000000000000002').ensure()
    latest = backup_manager.get_latest_archived_wals_info()
    assert latest
    assert len(latest) == 1
    assert latest['00000001'].name == '000000010000000000000002'
    # Test: the newer dir is empty
    wals.join('0000000100000001').ensure(dir=True)
    latest = backup_manager.get_latest_archived_wals_info()
    assert latest
    assert len(latest) == 1
    assert latest['00000001'].name == '000000010000000000000002'
    # Test: the newer contains a newer file
    wals.join('0000000100000001').join('000000010000000100000001').ensure()
    latest = backup_manager.get_latest_archived_wals_info()
    assert latest
    assert len(latest) == 1
    assert latest['00000001'].name == '000000010000000100000001'
    # Test: ignore out of order files
    wals.join('0000000100000000').join('000000010000000100000005').ensure()
    latest = backup_manager.get_latest_archived_wals_info()
    assert latest
    assert len(latest) == 1
    assert latest['00000001'].name == '000000010000000100000001'
    # Test: find the 2nd timeline
    wals.join('0000000200000000').join('000000020000000000000003').ensure()
    latest = backup_manager.get_latest_archived_wals_info()
    assert latest
    assert len(latest) == 2
    assert latest['00000001'].name == '000000010000000100000001'
    assert latest['00000002'].name == '000000020000000000000003'
def test_archive_wal(self, tmpdir, capsys):
    """
    Test WalArchiver.archive_wal behaviour when the WAL file already
    exists in the archive
    """
    # Setup the test environment
    backup_manager = build_backup_manager(
        name='TestServer',
        global_conf={'barman_home': tmpdir.strpath})
    backup_manager.compression_manager.get_compressor.return_value = None
    backup_manager.server.get_backup.return_value = None

    basedir = tmpdir.join('main')
    incoming_dir = basedir.join('incoming')
    archive_dir = basedir.join('wals')
    xlog_db = archive_dir.join('xlog.db')
    wal_name = '000000010000000000000001'
    wal_file = incoming_dir.join(wal_name)
    wal_file.ensure()
    archive_dir.ensure(dir=True)
    xlog_db.ensure()
    # The xlogdb context manager must hand out a real file object
    backup_manager.server.xlogdb.return_value.__enter__.return_value = (
        xlog_db.open(mode='a'))
    archiver = FileWalArchiver(backup_manager)
    backup_manager.server.archivers = [archiver]

    # Tests a basic archival process
    wal_info = WalFileInfo.from_file(wal_file.strpath)
    archiver.archive_wal(None, wal_info)
    assert not os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_info.fullpath(backup_manager.server))

    # Tests the archiver behaviour for duplicate WAL files, as the
    # wal file named '000000010000000000000001' was already archived
    # in the previous test
    wal_file.ensure()
    wal_info = WalFileInfo.from_file(wal_file.strpath)
    with pytest.raises(MatchingDuplicateWalFile):
        archiver.archive_wal(None, wal_info)

    # Tests the archiver behaviour for duplicated WAL files with
    # different contents
    wal_file.write('test')
    wal_info = WalFileInfo.from_file(wal_file.strpath)
    with pytest.raises(DuplicateWalFile):
        archiver.archive_wal(None, wal_info)

    # Tests the archiver behaviour for duplicate WAL files, as the
    # wal file named '000000010000000000000001' was already archived
    # in the previous test and the input file uses compression
    compressor = PyGZipCompressor(backup_manager.config, 'pygzip')
    compressor.compress(wal_file.strpath, wal_file.strpath)
    wal_info = WalFileInfo.from_file(wal_file.strpath)
    assert os.path.exists(wal_file.strpath)
    backup_manager.compression_manager.get_compressor \
        .return_value = compressor
    with pytest.raises(MatchingDuplicateWalFile):
        archiver.archive_wal(None, wal_info)

    # Test the archiver behaviour when the incoming file is compressed
    # and it has been already archived and compressed.
    compressor.compress(wal_info.fullpath(backup_manager.server),
                        wal_info.fullpath(backup_manager.server))
    wal_info = WalFileInfo.from_file(wal_file.strpath)
    with pytest.raises(MatchingDuplicateWalFile):
        archiver.archive_wal(None, wal_info)

    # Reset the status of the incoming and WALs directory
    # removing the files archived during the preceding tests.
    os.unlink(wal_info.fullpath(backup_manager.server))
    os.unlink(wal_file.strpath)

    # Test the archival of a WAL file using compression.
    wal_file.write('test')
    wal_info = WalFileInfo.from_file(wal_file.strpath)
    archiver.archive_wal(compressor, wal_info)
    assert os.path.exists(wal_info.fullpath(backup_manager.server))
    assert not os.path.exists(wal_file.strpath)
    assert 'gzip' == identify_compression(
        wal_info.fullpath(backup_manager.server))
def test_delete_backup(self, mock_available_backups, tmpdir, caplog):
    """
    Simple test for the deletion of a backup.
    We want to test the behaviour of the delete_backup method
    """
    # Setup of the test backup_manager
    backup_manager = build_backup_manager()
    backup_manager.server.config.name = 'TestServer'
    backup_manager.server.config.barman_lock_directory = tmpdir.strpath
    backup_manager.server.config.backup_options = []

    # Create a fake backup directory inside tmpdir (old format)
    base_dir = tmpdir.mkdir('base')
    backup_dir = base_dir.mkdir('fake_backup_id')
    pg_data = backup_dir.mkdir('pgdata')
    pg_data_v2 = backup_dir.mkdir('data')
    wal_dir = tmpdir.mkdir('wals')
    wal_history_file02 = wal_dir.join('00000002.history')
    wal_history_file03 = wal_dir.join('00000003.history')
    wal_history_file04 = wal_dir.join('00000004.history')
    wal_history_file02.write('1\t0/2000028\tat restore point "myrp"\n')
    wal_history_file03.write('1\t0/2000028\tat restore point "myrp"\n')
    wal_history_file04.write('1\t0/2000028\tat restore point "myrp"\n')
    wal_history_file04.write('2\t0/3000028\tunknown\n')
    wal_file = wal_dir.join('0000000100000000/000000010000000000000001')
    wal_file.ensure()
    xlog_db = wal_dir.join('xlog.db')
    xlog_db.write('000000010000000000000001\t42\t43\tNone\n'
                  '00000002.history\t42\t43\tNone\n'
                  '00000003.history\t42\t43\tNone\n'
                  '00000004.history\t42\t43\tNone\n')
    backup_manager.server.xlogdb.return_value.__enter__.return_value = (
        xlog_db.open())
    backup_manager.server.config.basebackups_directory = base_dir.strpath
    backup_manager.server.config.wals_directory = wal_dir.strpath
    # The following tablespaces are defined in the default backup info
    # generated by build_test_backup_info
    b_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
    )

    # Make sure we are not trying to delete any WAL file,
    # just by having a previous backup
    b_pre_info = build_test_backup_info(
        backup_id='fake_backup',
        server=backup_manager.server,
    )
    mock_available_backups.return_value = {
        "fake_backup": b_pre_info,
        "fake_backup_id": b_info,
    }

    # Test 1: minimum redundancy not satisfied
    caplog_reset(caplog)
    backup_manager.server.config.minimum_redundancy = 2
    b_info.set_attribute('backup_version', 1)
    build_backup_directories(b_info)
    backup_manager.delete_backup(b_info)
    assert re.search('WARNING .* Skipping delete of backup ', caplog.text)
    assert 'ERROR' not in caplog.text
    assert os.path.exists(pg_data.strpath)
    assert not os.path.exists(pg_data_v2.strpath)
    assert os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_history_file02.strpath)
    assert os.path.exists(wal_history_file03.strpath)
    assert os.path.exists(wal_history_file04.strpath)

    # Test 2: normal delete expecting no errors (old format)
    caplog_reset(caplog)
    backup_manager.server.config.minimum_redundancy = 1
    b_info.set_attribute('backup_version', 1)
    build_backup_directories(b_info)
    backup_manager.delete_backup(b_info)
    # the backup must not exists on disk anymore
    assert 'WARNING' not in caplog.text
    assert 'ERROR' not in caplog.text
    assert not os.path.exists(pg_data.strpath)
    assert not os.path.exists(pg_data_v2.strpath)
    assert os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_history_file02.strpath)
    assert os.path.exists(wal_history_file03.strpath)
    assert os.path.exists(wal_history_file04.strpath)

    # Test 3: delete the backup again, expect a failure in log
    caplog_reset(caplog)
    backup_manager.delete_backup(b_info)
    assert re.search('ERROR .* Failure deleting backup fake_backup_id',
                     caplog.text)
    assert not os.path.exists(pg_data.strpath)
    assert not os.path.exists(pg_data_v2.strpath)
    assert os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_history_file02.strpath)
    assert os.path.exists(wal_history_file03.strpath)
    assert os.path.exists(wal_history_file04.strpath)

    # Test 4: normal delete expecting no errors (new format)
    caplog_reset(caplog)
    b_info.set_attribute('backup_version', 2)
    build_backup_directories(b_info)
    backup_manager.delete_backup(b_info)
    assert 'WARNING' not in caplog.text
    assert 'ERROR' not in caplog.text
    assert not os.path.exists(pg_data.strpath)
    assert not os.path.exists(pg_data_v2.strpath)
    assert os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_history_file02.strpath)
    assert os.path.exists(wal_history_file03.strpath)
    assert os.path.exists(wal_history_file04.strpath)

    # Test 5: normal delete of first backup no errors and no skip
    # removing one of the two backups present (new format)
    # and all the previous wal
    caplog_reset(caplog)
    b_pre_info.set_attribute('backup_version', 2)
    build_backup_directories(b_pre_info)
    backup_manager.delete_backup(b_pre_info)
    assert 'WARNING' not in caplog.text
    assert 'ERROR' not in caplog.text
    assert not os.path.exists(pg_data.strpath)
    assert not os.path.exists(pg_data_v2.strpath)
    assert not os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_history_file02.strpath)
    assert os.path.exists(wal_history_file03.strpath)
    assert os.path.exists(wal_history_file04.strpath)

    # Test 6: normal delete of first backup no errors and no skip
    # removing one of the two backups present (new format)
    # the previous wal is retained as on a different timeline
    caplog_reset(caplog)
    wal_file.ensure()
    b_pre_info.set_attribute('timeline', 2)
    b_pre_info.set_attribute('backup_version', 2)
    build_backup_directories(b_pre_info)
    backup_manager.delete_backup(b_pre_info)
    assert 'WARNING' not in caplog.text
    assert 'ERROR' not in caplog.text
    assert not os.path.exists(pg_data.strpath)
    assert not os.path.exists(pg_data_v2.strpath)
    assert os.path.exists(wal_file.strpath)
    assert os.path.exists(wal_history_file02.strpath)
    assert os.path.exists(wal_history_file03.strpath)
    assert os.path.exists(wal_history_file04.strpath)

    # Test 7: simulate an error deleting the backup.
    with patch('barman.backup.BackupManager.delete_backup_data')\
            as mock_delete_data:
        caplog_reset(caplog)
        # We force delete_pgdata method to raise an exception.
        mock_delete_data.side_effect = OSError('TestError')
        wal_file.ensure()
        b_pre_info.set_attribute('backup_version', 2)
        build_backup_directories(b_pre_info)
        backup_manager.delete_backup(b_info)
        assert 'TestError' in caplog.text
        assert os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)
def test_delete_post_script(self, command_mock, caplog):
    """
    Unit test specific for the execution of a post delete script.

    test case:
    simulate the execution of a post delete script,
    should return 1 simulating the failed execution of the script.
    test the log of the execution, should contain a warning message,
    the warning message should be the concatenation of the out and err
    properties of the Command object.
    test the environment for the HookScriptRunner obj.
    test the name of the fake script
    """
    # BackupManager mock: server "test_server" with a post_delete hook
    # configured; previous/next backup lookups always fail so the
    # PREVIOUS_ID/NEXT_ID env entries stay empty.
    backup_manager = build_backup_manager(name="test_server")
    backup_manager.config.post_delete_script = "test_delete_post_script"
    backup_manager.get_previous_backup = MagicMock()
    backup_manager.get_previous_backup.side_effect = UnknownBackupIdException()
    backup_manager.get_next_backup = MagicMock()
    backup_manager.get_next_backup.side_effect = UnknownBackupIdException()
    # BackupInfo mock
    backup_info = MagicMock(name="backup_info")
    backup_info.get_basebackup_directory.return_value = "backup_directory"
    backup_info.backup_id = "123456789XYZ"
    backup_info.error = None
    backup_info.status = "OK"
    # Command mock executed by HookScriptRunner
    instance = command_mock.return_value
    # force the Cmd object to fail
    instance.return_value = 1
    # create a standard out entry for the obj
    instance.out = "std_out_line\n"
    # create a standard err entry for the obj
    instance.err = "std_err_line\n"
    # the actual test
    script = HookScriptRunner(backup_manager, "delete_script", "post")
    script.env_from_backup_info(backup_info)
    expected_env = {
        "BARMAN_PHASE": "post",
        "BARMAN_VERSION": version,
        "BARMAN_SERVER": "test_server",
        "BARMAN_CONFIGURATION": "build_config_from_dicts",
        "BARMAN_HOOK": "delete_script",
        "BARMAN_BACKUP_DIR": "backup_directory",
        "BARMAN_BACKUP_ID": "123456789XYZ",
        "BARMAN_ERROR": "",
        "BARMAN_STATUS": "OK",
        "BARMAN_PREVIOUS_ID": "",
        "BARMAN_NEXT_ID": "",
        "BARMAN_RETRY": "0",
    }
    # ensure that the script failed
    assert script.run() == 1
    # check the logs for a warning message. skip debug messages.
    # NOTE(review): this loop has no assertion in its body — it skips
    # DEBUG records and then does nothing, so the log contents are never
    # actually checked. Confirm whether a WARNING-level assertion on the
    # remaining records was intended here.
    for record in caplog.records:
        if record.levelname == "DEBUG":
            continue
    assert command_mock.call_count == 1
    # check the env
    assert command_mock.call_args[1]["env_append"] == expected_env
    # check the script name
    assert script.script == backup_manager.config.post_delete_script
def test_check(self, command_mock, capsys):
    """
    Check the ssh connection to a remote server

    Covers: clean ssh output, unclean stdout/stderr, PostgreSQL not
    responding, PostgreSQL not configured and a failing ssh connection.
    """
    backup_manager = build_backup_manager(
        global_conf={
            # Silence the warning for default backup strategy
            'backup_options': 'exclusive_backup',
        })
    # Test 1: ssh ok
    check_strategy = CheckOutputStrategy()
    command_mock.return_value.get_last_output.return_value = ('', '')
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: OK' in out
    # Test 2: ssh success, with unclean output (out)
    command_mock.reset_mock()
    command_mock.return_value.get_last_output.return_value = (
        'This is unclean', '')
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh output clean: FAILED' in out
    # Test 2bis: ssh success, with unclean output (err)
    command_mock.reset_mock()
    command_mock.return_value.get_last_output.return_value = (
        '', 'This is unclean')
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh output clean: FAILED' in out
    # Test 3: ssh ok and PostgreSQL is not responding
    command_mock.reset_mock()
    command_mock.return_value.get_last_output.return_value = ('', '')
    check_strategy = CheckOutputStrategy()
    backup_manager.server.get_remote_status.return_value = {
        'server_txt_version': None
    }
    backup_manager.server.get_backup.return_value.pgdata = 'test/'
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: OK' in out
    # (the two adjacent literals concatenate into one message string)
    assert "Check that the PostgreSQL server is up and no " \
           "'backup_label' file is in PGDATA." in out
    # Test 3-err: ssh ok and PostgreSQL is not configured
    command_mock.reset_mock()
    command_mock.return_value.get_last_output.return_value = ('', '')
    check_strategy = CheckOutputStrategy()
    # No postgres instance, so no remote status keys available
    backup_manager.server.get_remote_status.return_value = {}
    backup_manager.server.get_backup.return_value.pgdata = 'test/'
    # No exception must raise
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: OK' in out
    # Test 4: ssh failed
    command_mock.reset_mock()
    command_mock.side_effect = FsOperationFailed
    backup_manager.executor.check(check_strategy)
    out, err = capsys.readouterr()
    assert err == ''
    assert 'ssh: FAILED' in out
def test_check_redundancy(self, tmpdir):
    """
    Test the check method

    Exercises minimum_redundancy (unsatisfied/satisfied), the failed
    backup counter and the compression-related checks.
    """
    # Setup temp dir and server
    # build a backup_manager and setup a basic configuration
    backup_manager = build_backup_manager(
        name='TestServer',
        global_conf={
            'barman_home': tmpdir.strpath,
            'minimum_redundancy': "1"
        })
    backup_manager.executor = mock.MagicMock()
    # Test the unsatisfied minimum_redundancy option
    strategy_mock = mock.MagicMock()
    backup_manager.check(strategy_mock)
    # Expect a failure from the method
    strategy_mock.result.assert_called_with(
        'TestServer', False, hint='have 0 backups, expected at least 1')
    # Test the satisfied minimum_redundancy option
    b_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
    )
    b_info.save()
    strategy_mock.reset_mock()
    # reload the backup catalog so the new backup is visible
    backup_manager._load_backup_cache()
    backup_manager.check(strategy_mock)
    # Expect a success from the method
    strategy_mock.result.assert_called_with(
        'TestServer', True, hint='have 1 backups, expected at least 1')
    # Test for no failed backups
    strategy_mock.reset_mock()
    backup_manager._load_backup_cache()
    backup_manager.check(strategy_mock)
    # Expect a success from the method (no failed backups yet)
    strategy_mock.result.assert_any_call(
        'TestServer', True, hint='there are 0 failed backups')
    # Test for failed backups in catalog
    b_info = build_test_backup_info(
        backup_id='failed_backup_id',
        server=backup_manager.server,
        status=BackupInfo.FAILED,
    )
    b_info.save()
    strategy_mock.reset_mock()
    backup_manager._load_backup_cache()
    backup_manager.check(strategy_mock)
    # Expect a failure from the method
    strategy_mock.result.assert_any_call(
        'TestServer', False, hint='there are 1 failed backups')
    # Test unknown compression
    backup_manager.config.compression = 'test_compression'
    backup_manager.compression_manager.check.return_value = False
    strategy_mock.reset_mock()
    backup_manager.check(strategy_mock)
    # Expect a failure from the method
    strategy_mock.result.assert_any_call('TestServer', False)
    # Test valid compression
    backup_manager.config.compression = 'test_compression'
    backup_manager.compression_manager.check.return_value = True
    strategy_mock.reset_mock()
    backup_manager.check(strategy_mock)
    # Expect a success from the method
    strategy_mock.result.assert_any_call('TestServer', True)
    # Test failure retrieving a compressor
    backup_manager.config.compression = 'test_compression'
    backup_manager.compression_manager.check.return_value = True
    backup_manager.compression_manager.get_default_compressor \
        .side_effect = CompressionIncompatibility()
    strategy_mock.reset_mock()
    backup_manager.check(strategy_mock)
    # Expect a failure from the method
    strategy_mock.result.assert_any_call('TestServer', False)
def test_backup_copy_tablespaces_in_datadir(self, rsync_mock, tmpdir):
    """
    Test the execution of a rsync copy with tablespaces in data directory

    Verifies the exact sequence of RsyncCopyController calls: one
    add_directory per tablespace, one for PGDATA (protecting the
    tablespace paths), pg_control and the config file, then copy()
    and statistics().

    :param rsync_mock: mock for the RsyncCopyController object
    :param tmpdir: temporary dir
    """
    backup_manager = build_backup_manager(
        global_conf={'barman_home': tmpdir.mkdir('home').strpath})
    backup_manager.server.path = None
    backup_manager.server.postgres.server_major_version = '9.6'
    backup_info = build_test_backup_info(
        server=backup_manager.server,
        pgdata="/pg/data",
        config_file="/etc/postgresql.conf",
        hba_file="/pg/data/pg_hba.conf",
        ident_file="/pg/data/pg_ident.conf",
        begin_xlog="0/2000028",
        begin_wal="000000010000000000000002",
        begin_offset=28,
        tablespaces=(
            ('tbs1', 16387, '/pg/data/tbs1'),
            ('tbs2', 16405, '/pg/data/pg_tblspc/tbs2'),
            ('tbs3', 123456, '/pg/data3'),
        ),
    )
    backup_info.save()
    # This is to check that all the preparation is done correctly
    assert os.path.exists(backup_info.filename)
    backup_manager.executor.backup_copy(backup_info)
    # The whole call sequence on the copy controller must match exactly
    assert rsync_mock.mock_calls == [
        mock.call(reuse_backup=None, safe_horizon=None,
                  network_compression=False,
                  ssh_command='ssh', path=None,
                  ssh_options=['-c', '"arcfour"', '-p', '22',
                               '*****@*****.**', '-o', 'BatchMode=yes',
                               '-o', 'StrictHostKeyChecking=no'],
                  retry_sleep=30, retry_times=0, workers=1),
        mock.call().add_directory(
            label='tbs1',
            src=':/pg/data/tbs1/',
            dst=backup_info.get_data_directory(16387),
            reuse=None, bwlimit=None,
            item_class=rsync_mock.return_value.TABLESPACE_CLASS,
            exclude=["/*"] + EXCLUDE_LIST,
            include=["/PG_9.6_*"]),
        mock.call().add_directory(
            label='tbs2',
            src=':/pg/data/pg_tblspc/tbs2/',
            dst=backup_info.get_data_directory(16405),
            reuse=None, bwlimit=None,
            item_class=rsync_mock.return_value.TABLESPACE_CLASS,
            exclude=["/*"] + EXCLUDE_LIST,
            include=["/PG_9.6_*"]),
        mock.call().add_directory(
            label='tbs3',
            src=':/pg/data3/',
            dst=backup_info.get_data_directory(123456),
            reuse=None, bwlimit=None,
            item_class=rsync_mock.return_value.TABLESPACE_CLASS,
            exclude=["/*"] + EXCLUDE_LIST,
            include=["/PG_9.6_*"]),
        mock.call().add_directory(
            label='pgdata',
            src=':/pg/data/',
            dst=backup_info.get_data_directory(),
            reuse=None, bwlimit=None,
            item_class=rsync_mock.return_value.PGDATA_CLASS,
            exclude=(PGDATA_EXCLUDE_LIST + EXCLUDE_LIST),
            # tablespaces living inside PGDATA must be excluded (and
            # protected) from the PGDATA copy, by path and by oid link
            exclude_and_protect=[
                '/tbs1', '/pg_tblspc/16387',
                '/pg_tblspc/tbs2', '/pg_tblspc/16405',
                '/pg_tblspc/123456',
            ]),
        mock.call().add_file(
            label='pg_control',
            src=':/pg/data/global/pg_control',
            dst='%s/global/pg_control' % backup_info.get_data_directory(),
            item_class=rsync_mock.return_value.PGCONTROL_CLASS),
        mock.call().add_file(
            label='config_file',
            src=':/etc/postgresql.conf',
            dst=backup_info.get_data_directory(),
            item_class=rsync_mock.return_value.CONFIG_CLASS,
            optional=False),
        mock.call().copy(),
        mock.call().statistics(),
    ]
def test_backup_copy(self, remote_mock, pg_basebackup_mock,
                     tmpdir, capsys):
    """
    Test backup folder structure

    Checks how the postgres executor drives PgBaseBackup for old and
    new pg_basebackup versions (bwlimit support), for config files
    outside PGDATA (warning), with the external_configuration backup
    option (no warning), and error wrapping.

    :param remote_mock: mock for the fetch_remote_status method
    :param pg_basebackup_mock: mock for the PgBaseBackup object
    :param tmpdir: pytest temp directory
    """
    backup_manager = build_backup_manager(
        global_conf={
            'barman_home': tmpdir.mkdir('home').strpath,
            'backup_method': 'postgres'
        })
    # simulate a old version of pg_basebackup
    # not supporting bandwidth_limit
    remote_mock.return_value = {
        'pg_basebackup_version': '9.2',
        'pg_basebackup_path': '/fake/path',
        'pg_basebackup_bwlimit': False,
    }
    server_mock = backup_manager.server
    streaming_mock = server_mock.streaming
    server_mock.config.bandwidth_limit = 1
    streaming_mock.get_connection_string.return_value = 'fake=connstring'
    streaming_mock.conn_parameters = {
        'host': 'fakeHost',
        'port': 'fakePort',
        'user': '******'
    }
    backup_info = build_test_backup_info(server=backup_manager.server,
                                         backup_id='fake_backup_id')
    backup_manager.executor.backup_copy(backup_info)
    out, err = capsys.readouterr()
    assert out == ''
    assert err == ''
    # check that the bwlimit option have been ignored
    assert pg_basebackup_mock.mock_calls == [
        mock.call(connection=mock.ANY,
                  version='9.2',
                  app_name='barman_streaming_backup',
                  destination=mock.ANY,
                  command='/fake/path',
                  tbs_mapping=mock.ANY,
                  bwlimit=None,
                  immediate=False,
                  retry_times=0,
                  retry_sleep=30,
                  retry_handler=mock.ANY,
                  path=mock.ANY),
        mock.call()(),
    ]
    # Check with newer version
    remote_mock.reset_mock()
    pg_basebackup_mock.reset_mock()
    # clear the cached remote status so remote_mock is consulted again
    backup_manager.executor._remote_status = None
    remote_mock.return_value = {
        'pg_basebackup_version': '9.5',
        'pg_basebackup_path': '/fake/path',
        'pg_basebackup_bwlimit': True,
    }
    backup_manager.executor.config.immediate_checkpoint = True
    backup_manager.executor.config.streaming_conninfo = 'fake=connstring'
    backup_manager.executor.backup_copy(backup_info)
    out, err = capsys.readouterr()
    assert out == ''
    assert err == ''
    # check that the bwlimit option have been passed to the test call
    assert pg_basebackup_mock.mock_calls == [
        mock.call(connection=mock.ANY,
                  version='9.5',
                  app_name='barman_streaming_backup',
                  destination=mock.ANY,
                  command='/fake/path',
                  tbs_mapping=mock.ANY,
                  bwlimit=1,
                  immediate=True,
                  retry_times=0,
                  retry_sleep=30,
                  retry_handler=mock.ANY,
                  path=mock.ANY),
        mock.call()(),
    ]
    # Check with a config file outside the data directory
    remote_mock.reset_mock()
    pg_basebackup_mock.reset_mock()
    backup_info.ident_file = '/pg/pg_ident.conf'
    backup_manager.executor.backup_copy(backup_info)
    out, err = capsys.readouterr()
    assert out == ''
    assert err.strip() == 'WARNING: pg_basebackup does not copy ' \
                          'the PostgreSQL configuration files that ' \
                          'reside outside PGDATA. ' \
                          'Please manually backup the following files:' \
                          '\n\t/pg/pg_ident.conf'
    # check that the bwlimit option have been passed to the test call
    assert pg_basebackup_mock.mock_calls == [
        mock.call(connection=mock.ANY,
                  version='9.5',
                  app_name='barman_streaming_backup',
                  destination=mock.ANY,
                  command='/fake/path',
                  tbs_mapping=mock.ANY,
                  bwlimit=1,
                  immediate=True,
                  retry_times=0,
                  retry_sleep=30,
                  retry_handler=mock.ANY,
                  path=mock.ANY),
        mock.call()(),
    ]
    # Check with a config file outside the data directory and
    # external_configurations backup option
    remote_mock.reset_mock()
    pg_basebackup_mock.reset_mock()
    backup_manager.config.backup_options.add(
        BackupOptions.EXTERNAL_CONFIGURATION)
    backup_manager.executor.backup_copy(backup_info)
    out, err = capsys.readouterr()
    assert out == ''
    assert err == ''
    # check that the bwlimit option have been passed to the test call
    assert pg_basebackup_mock.mock_calls == [
        mock.call(connection=mock.ANY,
                  version='9.5',
                  app_name='barman_streaming_backup',
                  destination=mock.ANY,
                  command='/fake/path',
                  tbs_mapping=mock.ANY,
                  bwlimit=1,
                  immediate=True,
                  retry_times=0,
                  retry_sleep=30,
                  retry_handler=mock.ANY,
                  path=mock.ANY),
        mock.call()(),
    ]
    # Raise a test CommandFailedException and expect it to be wrapped
    # inside a DataTransferFailure exception
    remote_mock.reset_mock()
    pg_basebackup_mock.reset_mock()
    pg_basebackup_mock.return_value.side_effect = \
        CommandFailedException(dict(ret='ret', out='out', err='err'))
    with pytest.raises(DataTransferFailure):
        backup_manager.executor.backup_copy(backup_info)
def test_set_pitr_targets(self, tmpdir):
    """
    Evaluate targets for point in time recovery

    Checks target epoch/datetime computation, rejection of targets
    before the backup end time, and the per-PostgreSQL-version rules
    for recovery target actions (pause/promote).
    """
    # Build basic folder/files structure
    tempdir = tmpdir.mkdir('temp_dir')
    dest = tmpdir.mkdir('dest')
    wal_dest = tmpdir.mkdir('wal_dest')
    recovery_info = {
        'configuration_files': ['postgresql.conf', 'postgresql.auto.conf'],
        'tempdir': tempdir.strpath,
        'results': {'changes': [], 'warnings': []},
        'is_pitr': False,
        'wal_dest': wal_dest.strpath,
        'get_wal': False,
    }
    backup_info = testing_helpers.build_test_backup_info(
        end_time=dateutil.parser.parse('2015-06-03 16:11:01.71038+02'))
    backup_manager = testing_helpers.build_backup_manager()
    # Build a recovery executor
    executor = RecoveryExecutor(backup_manager)
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath, '', '', '', '', False, None)
    # Test with empty values (no PITR)
    assert recovery_info['target_epoch'] is None
    assert recovery_info['target_datetime'] is None
    assert recovery_info['wal_dest'] == wal_dest.strpath
    # Test for PITR targets
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath,
                               'target_name',
                               '2015-06-03 16:11:03.71038+02',
                               '2', None, False, None)
    target_datetime = dateutil.parser.parse(
        '2015-06-03 16:11:03.710380+02:00')
    # epoch = seconds since epoch plus the fractional microseconds
    target_epoch = (time.mktime(target_datetime.timetuple()) +
                    (target_datetime.microsecond / 1000000.))
    assert recovery_info['target_datetime'] == target_datetime
    assert recovery_info['target_epoch'] == target_epoch
    # with PITR enabled the WALs go to a barman_xlog dir inside dest
    assert recovery_info['wal_dest'] == dest.join('barman_xlog').strpath
    # Test for too early PITR target
    with pytest.raises(RecoveryInvalidTargetException) as exc_info:
        executor._set_pitr_targets(recovery_info, backup_info,
                                   dest.strpath, None,
                                   '2015-06-03 16:11:00.71038+02',
                                   None, None, False, None)
    assert str(exc_info.value) == \
        "The requested target time " \
        "2015-06-03 16:11:00.710380+02:00 " \
        "is before the backup end time " \
        "2015-06-03 16:11:01.710380+02:00"
    # Tests for PostgreSQL < 9.1
    backup_info.version = 90000
    with pytest.raises(RecoveryTargetActionException) as exc_info:
        executor._set_pitr_targets(recovery_info, backup_info,
                                   dest.strpath, 'target_name',
                                   '2015-06-03 16:11:03.71038+02',
                                   '2', None, False, 'pause')
    assert str(exc_info.value) == "Illegal target action 'pause' " \
                                  "for this version of PostgreSQL"
    # Tests for PostgreSQL between 9.1 and 9.4 included
    backup_info.version = 90100
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath, 'target_name',
                               '2015-06-03 16:11:03.71038+02',
                               '2', None, False, None)
    assert 'pause_at_recovery_target' not in recovery_info
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath, 'target_name',
                               '2015-06-03 16:11:03.71038+02',
                               '2', None, False, 'pause')
    assert recovery_info['pause_at_recovery_target'] == "on"
    del recovery_info['pause_at_recovery_target']
    with pytest.raises(RecoveryTargetActionException) as exc_info:
        executor._set_pitr_targets(recovery_info, backup_info,
                                   dest.strpath, 'target_name',
                                   '2015-06-03 16:11:03.71038+02',
                                   '2', None, False, 'promote')
    assert str(exc_info.value) == "Illegal target action 'promote' " \
                                  "for this version of PostgreSQL"
    # Tests for PostgreSQL >= 9.5
    backup_info.version = 90500
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath, 'target_name',
                               '2015-06-03 16:11:03.71038+02',
                               '2', None, False, 'pause')
    assert recovery_info['recovery_target_action'] == "pause"
    executor._set_pitr_targets(recovery_info, backup_info,
                               dest.strpath, 'target_name',
                               '2015-06-03 16:11:03.71038+02',
                               '2', None, False, 'promote')
    assert recovery_info['recovery_target_action'] == "promote"
    with pytest.raises(RecoveryTargetActionException) as exc_info:
        executor._set_pitr_targets(recovery_info, backup_info,
                                   dest.strpath, 'target_name',
                                   '2015-06-03 16:11:03.71038+02',
                                   '2', None, False, 'unavailable')
    assert str(exc_info.value) == "Illegal target action 'unavailable' " \
                                  "for this version of PostgreSQL"
    # Recovery target action should not be available if PITR is not
    # enabled
    backup_info.version = 90500
    with pytest.raises(RecoveryTargetActionException) as exc_info:
        executor._set_pitr_targets(recovery_info, backup_info,
                                   dest.strpath, None, None, None, None,
                                   False, 'pause')
    assert str(exc_info.value) == "Can't enable recovery target action " \
                                  "when PITR is not required"
def test_check(self, remote_status_mock):
    """
    Very simple and basic test for the check method

    :param remote_status_mock: mock for the get_remote_status method
    """
    # Fully compatible pg_basebackup: no error expected
    remote_status_mock.return_value = {
        'pg_basebackup_compatible': True,
        'pg_basebackup_installed': True,
        'pg_basebackup_path': '/fake/path',
        'pg_basebackup_bwlimit': True,
        'pg_basebackup_version': '9.5',
        'pg_basebackup_tbls_mapping': True,
    }
    check_strat = CheckStrategy()
    backup_manager = build_backup_manager(
        global_conf={'backup_method': 'postgres'})
    backup_manager.server.postgres.server_txt_version = '9.5'
    backup_manager.executor.check(check_strategy=check_strat)
    # No errors detected
    assert check_strat.has_error is not True
    remote_status_mock.reset_mock()
    remote_status_mock.return_value = {
        'pg_basebackup_compatible': False,
        'pg_basebackup_installed': True,
        'pg_basebackup_path': True,
        'pg_basebackup_bwlimit': True,
        'pg_basebackup_version': '9.5',
        'pg_basebackup_tbls_mapping': True,
    }
    check_strat = CheckStrategy()
    backup_manager.executor.check(check_strategy=check_strat)
    # Error present because of the 'pg_basebackup_compatible': False
    assert check_strat.has_error is True
    # Even if pg_backup has no tbls_mapping option the check
    # succeeds if the server doesn't have any tablespaces
    remote_status_mock.reset_mock()
    remote_status_mock.return_value = {
        'pg_basebackup_compatible': True,
        'pg_basebackup_installed': True,
        'pg_basebackup_path': True,
        'pg_basebackup_bwlimit': True,
        'pg_basebackup_version': '9.3',
        'pg_basebackup_tbls_mapping': False,
    }
    check_strat = CheckStrategy()
    backup_manager.server.postgres.get_tablespaces.return_value = []
    backup_manager.executor.check(check_strategy=check_strat)
    assert check_strat.has_error is False
    # This check fails because the server contains tablespaces and
    # pg_basebackup doesn't support the tbls_mapping option
    remote_status_mock.reset_mock()
    remote_status_mock.return_value = {
        'pg_basebackup_compatible': True,
        'pg_basebackup_installed': True,
        'pg_basebackup_path': True,
        'pg_basebackup_bwlimit': True,
        'pg_basebackup_version': '9.3',
        'pg_basebackup_tbls_mapping': False,
    }
    check_strat = CheckStrategy()
    backup_manager.server.postgres.get_tablespaces.return_value = [True]
    backup_manager.executor.check(check_strategy=check_strat)
    assert check_strat.has_error is True
def test_backup_copy(self, remote_mock, pg_basebackup_mock, tmpdir):
    """
    Test backup folder structure

    Old pg_basebackup (bwlimit ignored), newer one (bwlimit passed),
    and wrapping of CommandFailedException into DataTransferFailure.

    :param remote_mock: mock for the fetch_remote_status method
    :param pg_basebackup_mock: mock for the PgBaseBackup object
    :param tmpdir: pytest temp directory
    """
    backup_manager = build_backup_manager(global_conf={
        'barman_home': tmpdir.mkdir('home').strpath,
        'backup_method': 'postgres'
    })
    # simulate a old version of pg_basebackup
    # not supporting bandwidth_limit
    remote_mock.return_value = {
        'pg_basebackup_version': '9.2',
        'pg_basebackup_path': '/fake/path',
        'pg_basebackup_bwlimit': False,
    }
    server_mock = backup_manager.server
    streaming_mock = server_mock.streaming
    server_mock.config.bandwidth_limit = 1
    streaming_mock.get_connection_string.return_value = 'fake=connstring'
    streaming_mock.conn_parameters = {
        'host': 'fakeHost',
        'port': 'fakePort',
        'user': '******'
    }
    backup_info = build_test_backup_info(server=backup_manager.server,
                                         backup_id='fake_backup_id')
    backup_manager.executor.backup_copy(backup_info)
    # check that the bwlimit option have been ignored
    assert pg_basebackup_mock.mock_calls == [
        mock.call(
            connection=mock.ANY,
            version='9.2',
            app_name='barman_streaming_backup',
            destination=mock.ANY,
            command='/fake/path',
            tbs_mapping=mock.ANY,
            bwlimit=None,
            immediate=False,
            retry_times=0,
            retry_sleep=30,
            retry_handler=mock.ANY,
            path=mock.ANY),
        mock.call()(),
    ]
    # Check with newer version
    remote_mock.reset_mock()
    pg_basebackup_mock.reset_mock()
    # clear the cached remote status so remote_mock is consulted again
    backup_manager.executor._remote_status = None
    remote_mock.return_value = {
        'pg_basebackup_version': '9.5',
        'pg_basebackup_path': '/fake/path',
        'pg_basebackup_bwlimit': True,
    }
    backup_manager.executor.config.immediate_checkpoint = True
    backup_manager.executor.config.streaming_conninfo = 'fake=connstring'
    backup_manager.executor.backup_copy(backup_info)
    # check that the bwlimit option have been passed to the test call
    assert pg_basebackup_mock.mock_calls == [
        mock.call(
            connection=mock.ANY,
            version='9.5',
            app_name='barman_streaming_backup',
            destination=mock.ANY,
            command='/fake/path',
            tbs_mapping=mock.ANY,
            bwlimit=1,
            immediate=True,
            retry_times=0,
            retry_sleep=30,
            retry_handler=mock.ANY,
            path=mock.ANY),
        mock.call()(),
    ]
    # Raise a test CommandFailedException and expect it to be wrapped
    # inside a DataTransferFailure exception
    remote_mock.reset_mock()
    pg_basebackup_mock.reset_mock()
    pg_basebackup_mock.return_value.side_effect = \
        CommandFailedException(dict(ret='ret', out='out', err='err'))
    with pytest.raises(DataTransferFailure):
        backup_manager.executor.backup_copy(backup_info)
def test_backup(self, rwbb_mock, gpb_mock, backup_copy_mock,
                capsys, tmpdir):
    """
    Test the execution of a backup

    Runs the executor once with the exclusive strategy and once with
    the concurrent strategy, checking the console output and that
    start_backup, backup_copy and stop_backup are each invoked exactly
    once per run.

    :param rwbb_mock: mock for the remove_wal_before_backup method
    :param gpb_mock: mock for the get_previous_backup method
    :param backup_copy_mock: mock for the executor's backup_copy method
    :param capsys: stdout capture module
    :param tmpdir: pytest temp directory
    """
    backup_manager = build_backup_manager(
        global_conf={
            'barman_home': tmpdir.mkdir('home').strpath,
            # Silence the warning for default backup strategy
            'backup_options': 'exclusive_backup',
        })
    backup_info = LocalBackupInfo(backup_manager.server,
                                  backup_id='fake_backup_id')
    backup_info.begin_xlog = "0/2000028"
    backup_info.begin_wal = "000000010000000000000002"
    backup_info.begin_offset = 40
    backup_info.status = BackupInfo.EMPTY
    backup_info.copy_stats = dict(copy_time=100)
    # no previous backup and one removable WAL segment
    gpb_mock.return_value = None
    rwbb_mock.return_value = ['000000010000000000000001']
    # Test 1: exclusive backup
    backup_manager.executor.strategy = Mock()
    backup_manager.executor.backup(backup_info)
    out, err = capsys.readouterr()
    assert err == ''
    assert (
        "Backup start at LSN: "
        "0/2000028 (000000010000000000000002, 00000028)\n"
        "This is the first backup for server main\n"
        "WAL segments preceding the current backup have been found:\n"
        "\t000000010000000000000001 from server main has been removed\n"
        "Starting backup copy via rsync/SSH for fake_backup_id\n"
        "Copy done (time: 1 minute, 40 seconds)") in out
    gpb_mock.assert_called_with(backup_info.backup_id)
    rwbb_mock.assert_called_with(backup_info)
    backup_manager.executor.strategy.start_backup.assert_called_once_with(
        backup_info)
    backup_copy_mock.assert_called_once_with(backup_info)
    backup_manager.executor.strategy.stop_backup.assert_called_once_with(
        backup_info)
    # Test 2: concurrent backup
    # change the configuration to concurrent backup
    backup_manager.executor.config.backup_options = [
        BackupOptions.CONCURRENT_BACKUP
    ]
    # reset mocks
    gpb_mock.reset_mock()
    rwbb_mock.reset_mock()
    backup_manager.executor.strategy.reset_mock()
    backup_copy_mock.reset_mock()
    # prepare data directory for backup_label generation
    backup_info.backup_label = 'test\nlabel\n'
    backup_manager.executor.backup(backup_info)
    out, err = capsys.readouterr()
    assert err == ''
    assert (
        "Backup start at LSN: "
        "0/2000028 (000000010000000000000002, 00000028)\n"
        "This is the first backup for server main\n"
        "WAL segments preceding the current backup have been found:\n"
        "\t000000010000000000000001 from server main has been removed\n"
        "Starting backup copy via rsync/SSH for fake_backup_id\n"
        "Copy done (time: 1 minute, 40 seconds)") in out
    gpb_mock.assert_called_with(backup_info.backup_id)
    rwbb_mock.assert_called_with(backup_info)
    backup_manager.executor.strategy.start_backup.assert_called_once_with(
        backup_info)
    backup_copy_mock.assert_called_once_with(backup_info)
    # Fixed: the original duplicated the start_backup assertion here,
    # leaving the concurrent path's stop_backup unverified. Mirror
    # Test 1 and assert stop_backup was invoked exactly once.
    backup_manager.executor.strategy.stop_backup.assert_called_once_with(
        backup_info)
def test_backup(self, gpb_mock, pbc_mock, capsys, tmpdir):
    """
    Test backup

    Runs a postgres (pg_basebackup) backup end to end with mocked
    copy, checking the warning for config files outside PGDATA, the
    progress messages and the backup_info fields parsed from the
    generated backup_label file.

    :param gpb_mock: mock for the get_previous_backup method
    :param pbc_mock: mock for the backup_copy method
    :param capsys: stdout capture module
    :param tmpdir: pytest temp directory
    """
    tmp_home = tmpdir.mkdir('home')
    backup_manager = build_backup_manager(global_conf={
        'barman_home': tmp_home.strpath,
        'backup_method': 'postgres'
    })
    # ident_file lives outside /pg/data on purpose, to trigger the
    # "reside outside PGDATA" warning below
    backup_info = build_test_backup_info(
        backup_id='fake_backup_id',
        server=backup_manager.server,
        pgdata="/pg/data",
        config_file="/pg/data/postgresql.conf",
        hba_file="/pg/data/pg_hba.conf",
        ident_file="/pg/pg_ident.conf",
        begin_offset=28)
    timestamp = datetime.datetime(2015, 10, 26, 14, 38)
    backup_manager.server.postgres.current_xlog_info = dict(
        location='0/12000090',
        file_name='000000010000000000000012',
        file_offset=144,
        timestamp=timestamp,
    )
    backup_manager.server.postgres.get_setting.return_value = '/pg/data'
    # write a fake backup_label where the executor expects to find it
    tmp_backup_label = tmp_home.mkdir('main')\
        .mkdir('base').mkdir('fake_backup_id')\
        .mkdir('data').join('backup_label')
    start_time = datetime.datetime.now(tz.tzlocal()).replace(microsecond=0)
    tmp_backup_label.write(
        'START WAL LOCATION: 0/40000028 (file 000000010000000000000040)\n'
        'CHECKPOINT LOCATION: 0/40000028\n'
        'BACKUP METHOD: streamed\n'
        'BACKUP FROM: master\n'
        'START TIME: %s\n'
        'LABEL: pg_basebackup base backup' %
        start_time.strftime('%Y-%m-%d %H:%M:%S %Z')
    )
    backup_manager.executor.backup(backup_info)
    out, err = capsys.readouterr()
    gpb_mock.assert_called_once_with(backup_info.backup_id)
    assert err.strip() == 'WARNING: pg_basebackup does not copy ' \
                          'the PostgreSQL configuration files that ' \
                          'reside outside PGDATA. ' \
                          'Please manually backup the following files:' \
                          '\n\t/pg/pg_ident.conf'
    assert 'Copying files.' in out
    assert 'Copy done.' in out
    assert 'Finalising the backup.' in out
    # end position comes from current_xlog_info, begin fields from the
    # backup_label written above
    assert backup_info.end_xlog == '0/12000090'
    assert backup_info.end_offset == 144
    assert backup_info.begin_time == start_time
    assert backup_info.begin_wal == '000000010000000000000040'
    # Check the CommandFailedException re raising
    with pytest.raises(CommandFailedException):
        pbc_mock.side_effect = CommandFailedException('test')
        backup_manager.executor.backup(backup_info)
def test_recovery_post_script(self, command_mock):
    """
    Verify the execution of a post recovery hook script.

    The mocked script succeeds (exit status 0). We then check the
    environment handed to the Command object, the number of Command
    invocations and that the runner picked up the script name from
    the configuration.
    """
    # Backup manager with a configured post_recovery_script; the
    # previous/next backup lookups always raise, so the related env
    # entries must come out empty.
    manager = build_backup_manager(name='test_server')
    manager.config.post_recovery_script = \
        'test_recovery_post_script'
    for lookup_name in ('get_previous_backup', 'get_next_backup'):
        lookup = MagicMock()
        lookup.side_effect = UnknownBackupIdException()
        setattr(manager, lookup_name, lookup)
    # Minimal fake backup metadata
    fake_backup = MagicMock(name='backup_info')
    fake_backup.get_basebackup_directory.return_value = 'backup_directory'
    fake_backup.backup_id = '123456789XYZ'
    fake_backup.error = None
    fake_backup.status = 'OK'
    # The Command object run by HookScriptRunner reports success
    command_mock.return_value.return_value = 0
    # Run the hook and capture the environment it builds
    hook = HookScriptRunner(manager, 'recovery_script', 'post')
    hook.env_from_recover(
        fake_backup,
        dest='local_dest',
        tablespaces=None,
        remote_command=None
    )
    env_expected = {
        'BARMAN_PHASE': 'post',
        'BARMAN_VERSION': version,
        'BARMAN_SERVER': 'test_server',
        'BARMAN_CONFIGURATION': 'build_config_from_dicts',
        'BARMAN_HOOK': 'recovery_script',
        'BARMAN_BACKUP_DIR': 'backup_directory',
        'BARMAN_BACKUP_ID': '123456789XYZ',
        'BARMAN_ERROR': '',
        'BARMAN_STATUS': 'OK',
        'BARMAN_PREVIOUS_ID': '',
        'BARMAN_NEXT_ID': '',
        'BARMAN_RETRY': '0',
        'BARMAN_DESTINATION_DIRECTORY': 'local_dest',
        'BARMAN_TABLESPACES': '',
        'BARMAN_REMOTE_COMMAND': '',
        'BARMAN_RECOVER_OPTIONS': ''
    }
    assert hook.run() == 0
    assert command_mock.call_count == 1
    assert command_mock.call_args[1]['env_append'] == env_expected
    assert hook.script == manager.config.post_recovery_script