def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last
    ).and_return(())
    stdout = flexmock()
    insert_subprocess_mock(
        ('borg', 'check', 'repo', '--prefix', '{hostname}-', '--remote-path', 'borg1'),
        stdout=stdout,
        stderr=STDOUT,
    )
    flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
    flexmock(module.os).should_receive('devnull')

    module.check_archives(
        verbosity=None,
        repository='repo',
        storage_config={},
        consistency_config=consistency_config,
        remote_path='borg1',
    )

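# The insert_subprocess_mock()/insert_subprocess_never() helpers used by the older tests in this
# file aren't defined in this excerpt. A minimal sketch of what they might look like, assuming
# the module under test shells out via subprocess.check_call() and that flexmock is in use:
def insert_subprocess_mock(check_call_command, **kwargs):
    # Expect exactly one subprocess.check_call() with the given command and keyword arguments.
    subprocess = flexmock(module.subprocess)
    subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once()


def insert_subprocess_never():
    # Expect that the code under test spawns no subprocess at all.
    subprocess = flexmock(module.subprocess)
    subprocess.should_receive('check_call').never()
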
def test_check_archives_without_any_checks_bails():
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(())
    insert_subprocess_never()

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def _run_commands_on_repository(
    *,
    args,
    consistency,
    json_results,
    local_path,
    location,
    remote_path,
    retention,
    storage,
    unexpanded_repository,
):  # pragma: no cover
    repository = os.path.expanduser(unexpanded_repository)
    dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''

    if args.prune:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            args.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
        )
    if args.create:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        borg_create.create_archive(
            args.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
        )
    if args.check and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository, storage, consistency, local_path=local_path, remote_path=remote_path
        )
    if args.list:
        logger.info('{}: Listing archives'.format(repository))
        output = borg_list.list_archives(
            repository, storage, local_path=local_path, remote_path=remote_path, json=args.json
        )
        if args.json:
            json_results.append(json.loads(output))
        else:
            sys.stdout.write(output)
    if args.info:
        logger.info('{}: Displaying summary info for archives'.format(repository))
        output = borg_info.display_archives_info(
            repository, storage, local_path=local_path, remote_path=remote_path, json=args.json
        )
        if args.json:
            json_results.append(json.loads(output))
        else:
            sys.stdout.write(output)

def test_check_archives_without_any_checks_should_bail():
    consistency_config = flexmock().should_receive('get').and_return(None).mock
    flexmock(module).should_receive('_parse_checks').and_return(())
    insert_subprocess_never()

    module.check_archives(
        verbosity=None,
        repository='repo',
        consistency_config=consistency_config,
    )

def test_check_archives_with_log_info_calls_borg_with_info_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_logging_mock(logging.INFO)
    insert_execute_command_mock(('borg', 'check', 'repo', '--info'))

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

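# The newer tests mock the module's own execute_command() wrapper instead of subprocess
# directly. A plausible sketch of those helpers, assuming execute_command() receives the full
# command tuple as its only positional argument:
def insert_execute_command_mock(command):
    # Expect exactly one execute_command() call with the given command tuple.
    flexmock(module).should_receive('execute_command').with_args(command).once()


def insert_execute_command_never():
    # Expect that execute_command() never runs.
    flexmock(module).should_receive('execute_command').never()
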
def test_check_archives_calls_borg_with_parameters(checks):
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last, module.DEFAULT_PREFIX
    ).and_return(())
    insert_execute_command_mock(('borg', 'check', 'repo'))

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_log_info_calls_borg_with_info_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_logging_mock(logging.INFO)
    insert_subprocess_mock(('borg', 'check', 'repo', '--info'), stdout=None, stderr=STDOUT)

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

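# insert_logging_mock() is also not defined in this excerpt. A sketch under the assumption that
# the code under test decides whether to pass --info/--debug by consulting the standard logging
# level, so the helper fakes Logger.isEnabledFor() and Logger.getEffectiveLevel() via flexmock:
def insert_logging_mock(log_level):
    # Make every logger report the given level as its enabled, effective level.
    logger_class = flexmock(module.logging.Logger)
    logger_class.should_receive('isEnabledFor').replace_with(lambda level: level >= log_level)
    logger_class.should_receive('getEffectiveLevel').replace_with(lambda: log_level)
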
def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo'))

    module.check_archives(
        repository='repo',
        storage_config={'extra_borg_options': {'check': '--extra --options'}},
        consistency_config=consistency_config,
    )

def test_check_archives_with_extract_check_calls_extract_only():
    checks = ('extract',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').never()
    flexmock(module.extract).should_receive('extract_last_archive_dry_run').once()
    insert_execute_command_never()

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_extract_check_calls_extract_only():
    checks = ('extract',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').never()
    flexmock(module.extract).should_receive('extract_last_archive_dry_run').once()
    insert_subprocess_never()

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_retention_prefix():
    checks = ('repository',)
    check_last = flexmock()
    prefix = 'foo-'
    consistency_config = {'check_last': check_last, 'prefix': prefix}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last, prefix
    ).and_return(())
    insert_execute_command_mock(('borg', 'check', 'repo'))

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_logging_mock(logging.DEBUG)
    insert_subprocess_mock(
        ('borg', 'check', 'repo', '--debug', '--show-rc'), stdout=None, stderr=STDOUT
    )

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_repair_calls_borg_with_repair_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    flexmock(module).should_receive('execute_command').never()
    flexmock(module).should_receive('execute_command').with_args(
        ('borg', 'check', '--repair', 'repo'), output_file=module.DO_NOT_CAPTURE
    ).once()

    module.check_archives(
        repository='repo',
        storage_config={},
        consistency_config=consistency_config,
        repair=True,
    )

def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last, None
    ).and_return(())
    insert_execute_command_mock(('borg', 'check', 'repo', '--lock-wait', '5'))

    module.check_archives(
        repository='repo',
        storage_config={'lock_wait': 5},
        consistency_config=consistency_config,
    )

def test_check_archives_with_extract_check_should_call_extract_only():
    checks = ('extract',)
    check_last = flexmock()
    consistency_config = flexmock().should_receive('get').and_return(check_last).mock
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').never()
    flexmock(module.extract).should_receive('extract_last_archive_dry_run').once()
    insert_subprocess_never()

    module.check_archives(
        verbosity=None,
        repository='repo',
        consistency_config=consistency_config,
    )

def test_check_archives_with_verbosity_lots_should_call_borg_with_debug_parameter():
    checks = ('repository',)
    consistency_config = flexmock().should_receive('get').and_return(None).mock
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_subprocess_mock(
        ('borg', 'check', 'repo', '--debug'),
        stdout=None,
        stderr=STDOUT,
    )

    module.check_archives(
        verbosity=VERBOSITY_LOTS,
        repository='repo',
        consistency_config=consistency_config,
    )

def test_check_archives_with_progress_calls_borg_with_progress_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    flexmock(module).should_receive('execute_command').never()
    flexmock(module).should_receive('execute_command_without_capture').with_args(
        ('borg', 'check', '--progress', 'repo'), error_on_warnings=True
    ).once()

    module.check_archives(
        repository='repo',
        storage_config={},
        consistency_config=consistency_config,
        progress=True,
    )

def test_check_archives_calls_borg_with_parameters(checks):
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last, None
    ).and_return(())
    stdout = flexmock()
    insert_subprocess_mock(('borg', 'check', 'repo'), stdout=stdout, stderr=STDOUT)
    flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
    flexmock(module.os).should_receive('devnull')

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_local_path_calls_borg_via_local_path():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last, None
    ).and_return(())
    insert_execute_command_mock(('borg1', 'check', 'repo'))

    module.check_archives(
        repository='repo',
        storage_config={},
        consistency_config=consistency_config,
        local_path='borg1',
    )

def test_check_archives_with_verbosity_lots_calls_borg_with_debug_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_subprocess_mock(
        ('borg', 'check', 'repo', '--prefix', '{hostname}-', '--debug'),
        stdout=None,
        stderr=STDOUT,
    )

    module.check_archives(
        verbosity=VERBOSITY_LOTS,
        repository='repo',
        storage_config={},
        consistency_config=consistency_config,
    )

def main():  # pragma: no cover
    try:
        args = parse_arguments(*sys.argv[1:])
        config_filenames = tuple(collect.collect_config_filenames(args.config_paths))
        convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)

        if len(config_filenames) == 0:
            raise ValueError(
                'Error: No configuration files found in: {}'.format(' '.join(args.config_paths))
            )

        for config_filename in config_filenames:
            config = validate.parse_configuration(config_filename, validate.schema_filename())
            (location, storage, retention, consistency) = (
                config.get(section_name, {})
                for section_name in ('location', 'storage', 'retention', 'consistency')
            )
            remote_path = location.get('remote_path')
            create.initialize(storage)

            for repository in location['repositories']:
                if args.prune:
                    prune.prune_archives(
                        args.verbosity, repository, retention, remote_path=remote_path
                    )
                if args.create:
                    create.create_archive(
                        args.verbosity,
                        repository,
                        location,
                        storage,
                    )
                if args.check:
                    check.check_archives(
                        args.verbosity, repository, consistency, remote_path=remote_path
                    )
    except (ValueError, OSError, CalledProcessError) as error:
        print(error, file=sys.stderr)
        sys.exit(1)

def test_check_archives_with_retention_prefix():
    checks = ('repository',)
    check_last = flexmock()
    prefix = 'foo-'
    consistency_config = {'check_last': check_last, 'prefix': prefix}
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last, prefix
    ).and_return(())
    stdout = flexmock()
    insert_subprocess_mock(('borg', 'check', 'repo'), stdout=stdout, stderr=STDOUT)
    flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
    flexmock(module.os).should_receive('devnull')

    module.check_archives(
        repository='repo', storage_config={}, consistency_config=consistency_config
    )

def test_check_archives_with_remote_path_should_call_borg_with_remote_path_parameters():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = flexmock().should_receive('get').and_return(check_last).mock
    flexmock(module).should_receive('_parse_checks').and_return(checks)
    flexmock(module).should_receive('_make_check_flags').with_args(
        checks, check_last
    ).and_return(())
    stdout = flexmock()
    insert_subprocess_mock(
        ('borg', 'check', 'repo', '--remote-path', 'borg1'),
        stdout=stdout,
        stderr=STDOUT,
    )
    flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
    flexmock(module.os).should_receive('devnull')

    module.check_archives(
        verbosity=None,
        repository='repo',
        consistency_config=consistency_config,
        remote_path='borg1',
    )

def run_configuration(config_filename, args):  # pragma: no cover
    '''
    Parse a single configuration file, and execute its defined pruning, backups, and/or
    consistency checks.
    '''
    logger.info('{}: Parsing configuration file'.format(config_filename))
    config = validate.parse_configuration(config_filename, validate.schema_filename())
    (location, storage, retention, consistency, hooks) = (
        config.get(section_name, {})
        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
    )

    try:
        remote_path = location.get('remote_path')
        create.initialize_environment(storage)
        hook.execute_hook(hooks.get('before_backup'), config_filename, 'pre-backup')

        for unexpanded_repository in location['repositories']:
            repository = os.path.expanduser(unexpanded_repository)
            if args.prune:
                logger.info('{}: Pruning archives'.format(repository))
                prune.prune_archives(
                    args.verbosity, repository, retention, remote_path=remote_path
                )
            if args.create:
                logger.info('{}: Creating archive'.format(repository))
                create.create_archive(
                    args.verbosity,
                    repository,
                    location,
                    storage,
                )
            if args.check:
                logger.info('{}: Running consistency checks'.format(repository))
                check.check_archives(
                    args.verbosity, repository, consistency, remote_path=remote_path
                )

        hook.execute_hook(hooks.get('after_backup'), config_filename, 'post-backup')
    except (OSError, CalledProcessError):
        hook.execute_hook(hooks.get('on_error'), config_filename, 'on-error')
        raise

def run_actions(
    *,
    args,
    location,
    storage,
    retention,
    consistency,
    local_path,
    remote_path,
    repository_path,
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, and a repository name, run all actions
    from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.
    '''
    repository = os.path.expanduser(repository_path)
    dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''

    if args.init:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            args.encryption_mode,
            args.append_only,
            args.storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if args.prune:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            args.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=args.stats,
        )
    if args.create:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        json_output = borg_create.create_archive(
            args.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=args.progress,
            stats=args.stats,
            json=args.json,
        )
        if json_output:
            yield json.loads(json_output)
    if args.check and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository, storage, consistency, local_path=local_path, remote_path=remote_path
        )
    if args.extract:
        if args.repository is None or repository == args.repository:
            logger.info('{}: Extracting archive {}'.format(repository, args.archive))
            borg_extract.extract_archive(
                args.dry_run,
                repository,
                args.archive,
                args.restore_paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                progress=args.progress,
            )
    if args.list:
        if args.repository is None or repository == args.repository:
            logger.info('{}: Listing archives'.format(repository))
            json_output = borg_list.list_archives(
                repository,
                storage,
                args.archive,
                local_path=local_path,
                remote_path=remote_path,
                json=args.json,
            )
            if json_output:
                yield json.loads(json_output)
    if args.info:
        logger.info('{}: Displaying summary info for archives'.format(repository))
        json_output = borg_info.display_archives_info(
            repository, storage, local_path=local_path, remote_path=remote_path, json=args.json
        )
        if json_output:
            yield json.loads(json_output)

def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    local_path,
    remote_path,
    repository_path,
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, and a repository name, run all actions
    from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
        )
        if json_output:
            yield json.loads(json_output)
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        if arguments['extract'].repository is None or repository == arguments['extract'].repository:
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                arguments['extract'].archive,
                arguments['extract'].restore_paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                progress=arguments['extract'].progress,
            )
    if 'list' in arguments:
        if arguments['list'].repository is None or repository == arguments['list'].repository:
            logger.info('{}: Listing archives'.format(repository))
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=arguments['list'],
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or repository == arguments['info'].repository:
            logger.info('{}: Displaying summary info for archives'.format(repository))
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=arguments['info'],
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)

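# run_actions() above is a generator keyed off an 'arguments' dict that maps subcommand names to
# their parsed argparse namespaces, with a 'global' entry for shared flags. A hypothetical driver
# showing that calling convention; the Namespace fields set here are illustrative stand-ins, not
# a complete or authoritative set:
import argparse


def collect_json_results(location, storage, retention, consistency):
    # Only the subcommands present as keys get run; each JSON-producing action yields one
    # parsed result from the generator.
    arguments = {
        'global': argparse.Namespace(dry_run=False),
        'prune': argparse.Namespace(stats=True),
        'list': argparse.Namespace(repository=None, json=True),
    }
    return list(
        run_actions(
            arguments=arguments,
            location=location,
            storage=storage,
            retention=retention,
            consistency=consistency,
            local_path='borg',
            remote_path=None,
            repository_path=location['repositories'][0],
        )
    )
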
def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    repository_path,
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, and a repository name, run all actions
    from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        dispatch.call_hooks(
            'remove_database_dumps',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        active_dumps = dispatch.call_hooks(
            'dump_databases',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        stream_processes = [
            process for processes in active_dumps.values() for process in processes
        ]

        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
            stream_processes=stream_processes,
        )
        if json_output:
            yield json.loads(json_output)
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                strip_components=arguments['extract'].strip_components,
                progress=arguments['extract'].progress,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))
            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            restore_names = arguments['restore'].databases or []
            if 'all' in restore_names:
                restore_names = []

            archive_name = borg_list.resolve_archive_name(
                repository, arguments['restore'].archive, storage, local_path, remote_path
            )
            found_names = set()

            for hook_name, per_hook_restore_databases in hooks.items():
                if hook_name not in dump.DATABASE_HOOK_NAMES:
                    continue

                for restore_database in per_hook_restore_databases:
                    database_name = restore_database['name']
                    if restore_names and database_name not in restore_names:
                        continue

                    found_names.add(database_name)
                    dump_pattern = dispatch.call_hooks(
                        'make_database_dump_pattern',
                        hooks,
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        database_name,
                    )[hook_name]

                    # Kick off a single database extract to stdout.
                    extract_process = borg_extract.extract_archive(
                        dry_run=global_arguments.dry_run,
                        repository=repository,
                        archive=archive_name,
                        paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
                        location_config=location,
                        storage_config=storage,
                        local_path=local_path,
                        remote_path=remote_path,
                        destination_path='/',
                        # A directory format dump isn't a single file, and therefore can't extract
                        # to stdout. In this case, the extract_process return value is None.
                        extract_to_stdout=bool(restore_database.get('format') != 'directory'),
                    )

                    # Run a single database restore, consuming the extract stdout (if any).
                    dispatch.call_hooks(
                        'restore_database_dump',
                        {hook_name: [restore_database]},
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        global_arguments.dry_run,
                        extract_process,
                    )

            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            if not restore_names and not found_names:
                raise ValueError('No databases were found to restore')

            missing_names = sorted(set(restore_names) - found_names)
            if missing_names:
                raise ValueError(
                    'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
                        ', '.join(missing_names)
                    )
                )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning('{}: Displaying summary info for archives'.format(repository))
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)

def run_actions(
    *,
    arguments,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    repository_path,
):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
    configuration dicts, local and remote paths to Borg, and a repository name, run all actions
    from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
        )
        if json_output:
            yield json.loads(json_output)
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                progress=arguments['extract'].progress,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))
            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )

            restore_names = arguments['restore'].databases or []
            if 'all' in restore_names:
                restore_names = []

            # Extract dumps for the named databases from the archive.
            dump_patterns = dispatch.call_hooks(
                'make_database_dump_patterns',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                restore_names,
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['restore'].archive, storage, local_path, remote_path
                ),
                dump.convert_glob_patterns_to_borg_patterns(
                    dump.flatten_dump_patterns(dump_patterns, restore_names)
                ),
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path='/',
                progress=arguments['restore'].progress,
                # We don't want glob patterns that don't match to error.
                error_on_warnings=False,
            )

            # Map the restore names or detected dumps to the corresponding database
            # configurations.
            restore_databases = dump.get_per_hook_database_configurations(
                hooks, restore_names, dump_patterns
            )

            # Finally, restore the databases and cleanup the dumps.
            dispatch.call_hooks(
                'restore_database_dumps',
                restore_databases,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'remove_database_dumps',
                restore_databases,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning('{}: Displaying summary info for archives'.format(repository))
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)

def run_configuration(config_filename, args):  # pragma: no cover
    '''
    Parse a single configuration file, and execute its defined pruning, backups, and/or
    consistency checks.
    '''
    logger.info('{}: Parsing configuration file'.format(config_filename))
    config = validate.parse_configuration(config_filename, validate.schema_filename())
    (location, storage, retention, consistency, hooks) = (
        config.get(section_name, {})
        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
    )

    try:
        local_path = location.get('local_path', 'borg')
        remote_path = location.get('remote_path')
        borg_create.initialize_environment(storage)
        hook.execute_hook(hooks.get('before_backup'), config_filename, 'pre-backup')

        for unexpanded_repository in location['repositories']:
            repository = os.path.expanduser(unexpanded_repository)
            dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''
            if args.prune:
                logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
                borg_prune.prune_archives(
                    args.verbosity,
                    args.dry_run,
                    repository,
                    storage,
                    retention,
                    local_path=local_path,
                    remote_path=remote_path,
                )
            if args.create:
                logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
                borg_create.create_archive(
                    args.verbosity,
                    args.dry_run,
                    repository,
                    location,
                    storage,
                    local_path=local_path,
                    remote_path=remote_path,
                )
            if args.check:
                logger.info('{}: Running consistency checks'.format(repository))
                borg_check.check_archives(
                    args.verbosity,
                    repository,
                    storage,
                    consistency,
                    local_path=local_path,
                    remote_path=remote_path,
                )
            if args.list:
                logger.info('{}: Listing archives'.format(repository))
                borg_list.list_archives(
                    args.verbosity,
                    repository,
                    storage,
                    local_path=local_path,
                    remote_path=remote_path,
                )
            if args.info:
                logger.info('{}: Displaying summary info for archives'.format(repository))
                borg_info.display_archives_info(
                    args.verbosity,
                    repository,
                    storage,
                    local_path=local_path,
                    remote_path=remote_path,
                )

        hook.execute_hook(hooks.get('after_backup'), config_filename, 'post-backup')
    except (OSError, CalledProcessError):
        hook.execute_hook(hooks.get('on_error'), config_filename, 'on-error')
        raise