def test_resolve_archive_name_without_archives_raises():
    '''An empty "borg list" response means there is no latest archive, so expect ValueError.'''
    command = ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS
    flexmock(module).should_receive('execute_command').with_args(
        command, output_log_level=None, borg_local_path='borg'
    ).and_return('')

    with pytest.raises(ValueError):
        module.resolve_archive_name('repo', 'latest', storage_config={})
def test_resolve_archive_name_calls_borg_with_parameters():
    '''resolve_archive_name() returns the newline-stripped archive name that "borg list" prints.'''
    archive_name = 'archive-name'
    command = ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS
    flexmock(module).should_receive('execute_command').with_args(
        command, output_log_level=None, borg_local_path='borg'
    ).and_return('{}\n'.format(archive_name))

    assert (
        module.resolve_archive_name('repo', 'latest', storage_config={}) == archive_name
    )
def test_resolve_archive_name_with_local_path_calls_borg_via_local_path():
    '''A local_path argument swaps in the alternate Borg executable name.'''
    archive_name = 'archive-name'
    command = ('borg1', 'list') + BORG_LIST_LATEST_ARGUMENTS
    flexmock(module).should_receive('execute_command').with_args(
        command, output_log_level=None, error_on_warnings=False,
    ).and_return(archive_name + '\n')

    result = module.resolve_archive_name(
        'repo', 'latest', storage_config={}, local_path='borg1'
    )
    assert result == archive_name
def test_resolve_archive_name_with_log_debug_calls_borg_with_debug_parameter():
    '''Debug-level logging adds "--debug --show-rc" to the Borg command line.'''
    archive_name = 'archive-name'
    command = ('borg', 'list', '--debug', '--show-rc') + BORG_LIST_LATEST_ARGUMENTS
    flexmock(module).should_receive('execute_command').with_args(
        command, output_log_level=None, error_on_warnings=False,
    ).and_return(archive_name + '\n')
    insert_logging_mock(logging.DEBUG)

    assert (
        module.resolve_archive_name('repo', 'latest', storage_config={}) == archive_name
    )
def test_resolve_archive_name_with_lock_wait_calls_borg_with_lock_wait_parameters():
    '''A lock_wait storage option becomes a "--lock-wait" flag on the Borg command line.'''
    archive_name = 'archive-name'
    command = ('borg', 'list', '--lock-wait', 'okay') + BORG_LIST_LATEST_ARGUMENTS
    flexmock(module).should_receive('execute_command').with_args(
        command, output_log_level=None, error_on_warnings=False,
    ).and_return(archive_name + '\n')

    result = module.resolve_archive_name(
        'repo', 'latest', storage_config={'lock_wait': 'okay'}
    )
    assert result == archive_name
def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    repository_path
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, and a repository name, run all actions
    from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    # Suffix appended to log messages so dry runs are clearly labeled.
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    # Each requested action is present as a key in the arguments dict. Actions run in the
    # fixed order of the checks below: init, prune, create, check, extract, mount, restore,
    # list, info.
    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
        )
        if json_output:
            yield json.loads(json_output)
    # Consistency checks can be disabled per-repository via the consistency configuration.
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        # Skip unless the action's repository argument matches this repository (or was omitted).
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                # Resolve the special archive name "latest" into a concrete archive name.
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                progress=arguments['extract'].progress,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            # Either a single archive or the whole repository can be mounted.
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))
            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )

            restore_names = arguments['restore'].databases or []
            # An 'all' entry clears the name filter; presumably the hooks below treat an
            # empty list as "every configured database" — confirm against the hook code.
            if 'all' in restore_names:
                restore_names = []

            # Extract dumps for the named databases from the archive.
            dump_patterns = dispatch.call_hooks(
                'make_database_dump_patterns',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                restore_names,
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['restore'].archive, storage, local_path, remote_path
                ),
                dump.convert_glob_patterns_to_borg_patterns(
                    dump.flatten_dump_patterns(dump_patterns, restore_names)
                ),
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path='/',
                progress=arguments['restore'].progress,
                # We don't want glob patterns that don't match to error.
                error_on_warnings=False,
            )

            # Map the restore names or detected dumps to the corresponding database configurations.
            restore_databases = dump.get_per_hook_database_configurations(
                hooks, restore_names, dump_patterns
            )

            # Finally, restore the databases and cleanup the dumps.
            dispatch.call_hooks(
                'restore_database_dumps',
                restore_databases,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'remove_database_dumps',
                restore_databases,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            # Copy so that mutating .archive below doesn't alter the caller's arguments.
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            # Copy so that mutating .archive below doesn't alter the caller's arguments.
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning(
                    '{}: Displaying summary info for archives'.format(repository)
                )
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    repository_path
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, and a repository name, run all actions
    from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    # Suffix appended to log messages so dry runs are clearly labeled.
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    # Each requested action is present as a key in the arguments dict. Actions run in the
    # fixed order of the checks below: init, prune, create, check, extract, mount, restore,
    # list, info.
    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        # Clear out any leftover dumps from a previous run before dumping afresh.
        dispatch.call_hooks(
            'remove_database_dumps',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        active_dumps = dispatch.call_hooks(
            'dump_databases',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        # Flatten the per-hook dump processes into a single list for Borg to consume.
        stream_processes = [
            process for processes in active_dumps.values() for process in processes
        ]
        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
            stream_processes=stream_processes,
        )
        if json_output:
            yield json.loads(json_output)
    # Consistency checks can be disabled per-repository via the consistency configuration.
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        # Skip unless the action's repository argument matches this repository (or was omitted).
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                # Resolve the special archive name "latest" into a concrete archive name.
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                strip_components=arguments['extract'].strip_components,
                progress=arguments['extract'].progress,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            # Either a single archive or the whole repository can be mounted.
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))
            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )

            # Clear out any leftover dumps before extracting fresh ones below.
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            restore_names = arguments['restore'].databases or []
            # An 'all' entry clears the name filter; the loop below skips filtering when
            # restore_names is empty, so an empty list selects every configured database.
            if 'all' in restore_names:
                restore_names = []

            archive_name = borg_list.resolve_archive_name(
                repository, arguments['restore'].archive, storage, local_path, remote_path
            )
            # Track which requested databases were actually found in the configuration.
            found_names = set()

            for hook_name, per_hook_restore_databases in hooks.items():
                if hook_name not in dump.DATABASE_HOOK_NAMES:
                    continue

                for restore_database in per_hook_restore_databases:
                    database_name = restore_database['name']
                    if restore_names and database_name not in restore_names:
                        continue

                    found_names.add(database_name)
                    dump_pattern = dispatch.call_hooks(
                        'make_database_dump_pattern',
                        hooks,
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        database_name,
                    )[hook_name]

                    # Kick off a single database extract to stdout.
                    extract_process = borg_extract.extract_archive(
                        dry_run=global_arguments.dry_run,
                        repository=repository,
                        archive=archive_name,
                        paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
                        location_config=location,
                        storage_config=storage,
                        local_path=local_path,
                        remote_path=remote_path,
                        destination_path='/',
                        # A directory format dump isn't a single file, and therefore can't extract
                        # to stdout. In this case, the extract_process return value is None.
                        extract_to_stdout=bool(
                            restore_database.get('format') != 'directory'
                        ),
                    )

                    # Run a single database restore, consuming the extract stdout (if any).
                    dispatch.call_hooks(
                        'restore_database_dump',
                        {hook_name: [restore_database]},
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        global_arguments.dry_run,
                        extract_process,
                    )

            # NOTE(review): flat source loses indentation; per the statement order, this
            # cleanup and the validation below sit after the restore loop — confirm nesting.
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            if not restore_names and not found_names:
                raise ValueError('No databases were found to restore')

            missing_names = sorted(set(restore_names) - found_names)
            if missing_names:
                raise ValueError(
                    'Cannot restore database(s) {} missing from borgmatic\'s configuration'
                    .format(', '.join(missing_names))
                )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            # Copy so that mutating .archive below doesn't alter the caller's arguments.
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            # Copy so that mutating .archive below doesn't alter the caller's arguments.
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning(
                    '{}: Displaying summary info for archives'.format(repository)
                )
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
def test_resolve_archive_name_passes_through_non_latest_archive_name():
    '''Any archive name other than the special name "latest" is returned unchanged.'''
    archive_name = 'myhost-2030-01-01T14:41:17.647620'

    result = module.resolve_archive_name('repo', archive_name, storage_config={})
    assert result == archive_name