def test_call_hooks_calls_skips_return_values_for_null_hooks():
    '''
    A hook configured as None gets skipped entirely: call_hooks() should omit it
    from the returned mapping instead of producing an entry for it.
    '''
    configured_hooks = {'super_hook': flexmock(), 'other_hook': None}
    expected = {'super_hook': flexmock()}
    flexmock(module).should_receive('call_hook').and_return(expected['super_hook'])

    actual = module.call_hooks(
        'do_stuff', configured_hooks, 'prefix', ('super_hook', 'other_hook'), 55
    )

    assert actual == expected
def run_configuration(config_filename, config, arguments):
    '''
    Given a config filename, the corresponding parsed config dict, and command-line arguments as
    a dict from subparser name to a namespace of parsed arguments, execute its defined pruning,
    backups, consistency checks, and/or other actions.

    Yield a combination of:

      * JSON output strings from successfully executing any actions that produce JSON
      * logging.LogRecord instances containing errors from any actions or backup hooks that fail
    '''
    # Pull out each configuration section, defaulting to an empty dict when absent.
    (location, storage, retention, consistency, hooks) = (
        config.get(section_name, {})
        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
    )
    global_arguments = arguments['global']

    local_path = location.get('local_path', 'borg')
    remote_path = location.get('remote_path')
    borg_environment.initialize(storage)
    encountered_error = None
    error_repository = ''

    # Monitoring pings and on-error hooks only apply when one of these actions is requested.
    prune_create_or_check = {'prune', 'create', 'check'}.intersection(arguments)
    monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)

    try:
        if prune_create_or_check:
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.START,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        if 'prune' in arguments:
            command.execute_hook(
                hooks.get('before_prune'),
                hooks.get('umask'),
                config_filename,
                'pre-prune',
                global_arguments.dry_run,
            )
        if 'create' in arguments:
            command.execute_hook(
                hooks.get('before_backup'),
                hooks.get('umask'),
                config_filename,
                'pre-backup',
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'dump_databases',
                hooks,
                config_filename,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
        if 'check' in arguments:
            command.execute_hook(
                hooks.get('before_check'),
                hooks.get('umask'),
                config_filename,
                'pre-check',
                global_arguments.dry_run,
            )
    except (OSError, CalledProcessError) as error:
        # A soft failure means: silently skip this configuration without treating it as an error.
        if command.considered_soft_failure(config_filename, error):
            return

        encountered_error = error
        yield from make_error_log_records(
            '{}: Error running pre hook'.format(config_filename), error
        )

    if not encountered_error:
        for repository_path in location['repositories']:
            try:
                yield from run_actions(
                    arguments=arguments,
                    location=location,
                    storage=storage,
                    retention=retention,
                    consistency=consistency,
                    hooks=hooks,
                    local_path=local_path,
                    remote_path=remote_path,
                    repository_path=repository_path,
                )
            except (OSError, CalledProcessError, ValueError) as error:
                # Record the error but don't raise, so that post/error hooks below still run.
                encountered_error = error
                error_repository = repository_path
                yield from make_error_log_records(
                    '{}: Error running actions for repository'.format(repository_path), error
                )

    if not encountered_error:
        try:
            if 'prune' in arguments:
                command.execute_hook(
                    hooks.get('after_prune'),
                    hooks.get('umask'),
                    config_filename,
                    'post-prune',
                    global_arguments.dry_run,
                )
            if 'create' in arguments:
                dispatch.call_hooks(
                    'remove_database_dumps',
                    hooks,
                    config_filename,
                    dump.DATABASE_HOOK_NAMES,
                    location,
                    global_arguments.dry_run,
                )
                command.execute_hook(
                    hooks.get('after_backup'),
                    hooks.get('umask'),
                    config_filename,
                    'post-backup',
                    global_arguments.dry_run,
                )
            if 'check' in arguments:
                command.execute_hook(
                    hooks.get('after_check'),
                    hooks.get('umask'),
                    config_filename,
                    'post-check',
                    global_arguments.dry_run,
                )
            # Reuse the intersection computed above rather than recomputing it inline.
            if prune_create_or_check:
                dispatch.call_hooks(
                    'ping_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitor.State.FINISH,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            encountered_error = error
            yield from make_error_log_records(
                '{}: Error running post hook'.format(config_filename), error
            )

    if encountered_error and prune_create_or_check:
        try:
            command.execute_hook(
                hooks.get('on_error'),
                hooks.get('umask'),
                config_filename,
                'on-error',
                global_arguments.dry_run,
                repository=error_repository,
                error=encountered_error,
                output=getattr(encountered_error, 'output', ''),
            )
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.FAIL,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            yield from make_error_log_records(
                '{}: Error running on-error hook'.format(config_filename), error
            )
# NOTE(review): a second run_actions definition appears later in this file and shadows this
# one at import time — confirm whether this earlier copy is obsolete and should be removed.
def run_actions(*, arguments, location, storage, retention, consistency, hooks,
                local_path, remote_path, repository_path):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several
    different configuration dicts, local and remote paths to Borg, and a repository name, run
    all actions from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    # Each requested action is keyed by its subparser name in the arguments dict.
    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
        )
        if json_output:
            yield json.loads(json_output)
    # Consistency checks can be disabled per-repository via configuration.
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        # Only extract when this repository matches the one requested (or none was requested).
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                progress=arguments['extract'].progress,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))
            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )

            restore_names = arguments['restore'].databases or []
            # "all" means: restore every configured database, so clear the name filter.
            if 'all' in restore_names:
                restore_names = []

            # Extract dumps for the named databases from the archive.
            dump_patterns = dispatch.call_hooks(
                'make_database_dump_patterns',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                restore_names,
            )

            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['restore'].archive, storage, local_path, remote_path
                ),
                dump.convert_glob_patterns_to_borg_patterns(
                    dump.flatten_dump_patterns(dump_patterns, restore_names)
                ),
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path='/',
                progress=arguments['restore'].progress,
                # We don't want glob patterns that don't match to error.
                error_on_warnings=False,
            )

            # Map the restore names or detected dumps to the corresponding database configurations.
            restore_databases = dump.get_per_hook_database_configurations(
                hooks, restore_names, dump_patterns
            )

            # Finally, restore the databases and cleanup the dumps.
            dispatch.call_hooks(
                'restore_database_dumps',
                restore_databases,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'remove_database_dumps',
                restore_databases,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            # Copy the namespace so mutating .archive below doesn't leak to other repositories.
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            # Copy the namespace so mutating .archive below doesn't leak to other repositories.
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning(
                    '{}: Displaying summary info for archives'.format(repository)
                )
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
# NOTE(review): this re-definition shadows the earlier run_actions in this file; at import
# time only this version is bound to the name. Confirm the earlier copy can be deleted.
def run_actions(*, arguments, location, storage, retention, consistency, hooks,
                local_path, remote_path, repository_path):  # pragma: no cover
    '''
    Given parsed command-line arguments as an argparse.ArgumentParser instance, several
    different configuration dicts, local and remote paths to Borg, and a repository name, run
    all actions from the command-line arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action. Raise ValueError if the arguments or configuration passed to action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''

    # Each requested action is keyed by its subparser name in the arguments dict.
    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        # Clear any stale dumps, then start fresh ones that stream into the archive.
        dispatch.call_hooks(
            'remove_database_dumps',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        active_dumps = dispatch.call_hooks(
            'dump_databases',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        # Flatten the per-hook process lists into one list for create_archive to consume.
        stream_processes = [
            process for processes in active_dumps.values() for process in processes
        ]
        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
            stream_processes=stream_processes,
        )
        if json_output:
            yield json.loads(json_output)
    # Consistency checks can be disabled per-repository via configuration.
    if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        # Only extract when this repository matches the one requested (or none was requested).
        if arguments['extract'].repository is None or validate.repositories_match(
            repository, arguments['extract'].repository
        ):
            logger.info(
                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
            )
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['extract'].archive, storage, local_path, remote_path
                ),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                strip_components=arguments['extract'].strip_components,
                progress=arguments['extract'].progress,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
            repository, arguments['mount'].repository
        ):
            if arguments['mount'].archive:
                logger.info(
                    '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
                )
            else:
                logger.info('{}: Mounting repository'.format(repository))
            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(
                    repository, arguments['mount'].archive, storage, local_path, remote_path
                ),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
            repository, arguments['restore'].repository
        ):
            logger.info(
                '{}: Restoring databases from archive {}'.format(
                    repository, arguments['restore'].archive
                )
            )

            # Clear any stale dumps before extracting fresh ones from the archive.
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            restore_names = arguments['restore'].databases or []
            # "all" means: restore every configured database, so clear the name filter.
            if 'all' in restore_names:
                restore_names = []

            archive_name = borg_list.resolve_archive_name(
                repository, arguments['restore'].archive, storage, local_path, remote_path
            )
            # Track which configured databases actually matched, to report missing ones below.
            found_names = set()

            for hook_name, per_hook_restore_databases in hooks.items():
                # Only database hooks participate in restores; skip other hook types.
                if hook_name not in dump.DATABASE_HOOK_NAMES:
                    continue

                for restore_database in per_hook_restore_databases:
                    database_name = restore_database['name']
                    if restore_names and database_name not in restore_names:
                        continue

                    found_names.add(database_name)
                    dump_pattern = dispatch.call_hooks(
                        'make_database_dump_pattern',
                        hooks,
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        database_name,
                    )[hook_name]

                    # Kick off a single database extract to stdout.
                    extract_process = borg_extract.extract_archive(
                        dry_run=global_arguments.dry_run,
                        repository=repository,
                        archive=archive_name,
                        paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
                        location_config=location,
                        storage_config=storage,
                        local_path=local_path,
                        remote_path=remote_path,
                        destination_path='/',
                        # A directory format dump isn't a single file, and therefore can't extract
                        # to stdout. In this case, the extract_process return value is None.
                        extract_to_stdout=bool(
                            restore_database.get('format') != 'directory'
                        ),
                    )

                    # Run a single database restore, consuming the extract stdout (if any).
                    dispatch.call_hooks(
                        'restore_database_dump',
                        {hook_name: [restore_database]},
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        global_arguments.dry_run,
                        extract_process,
                    )

            # Clean up any dumps that the restores above left behind.
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            if not restore_names and not found_names:
                raise ValueError('No databases were found to restore')

            missing_names = sorted(set(restore_names) - found_names)
            if missing_names:
                raise ValueError(
                    'Cannot restore database(s) {} missing from borgmatic\'s configuration'
                    .format(', '.join(missing_names))
                )
    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
            repository, arguments['list'].repository
        ):
            # Copy the namespace so mutating .archive below doesn't leak to other repositories.
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
            repository, arguments['info'].repository
        ):
            # Copy the namespace so mutating .archive below doesn't leak to other repositories.
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning(
                    '{}: Displaying summary info for archives'.format(repository)
                )
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path, remote_path
            )
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)