Example 1
def test_export_tar_archive_calls_borg_with_dry_run_parameter():
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    flexmock(module).should_receive('execute_command').never()

    module.export_tar_archive(
        dry_run=True,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
    )
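
These examples exercise borgmatic's export_tar_archive function (presumably imported from borgmatic.borg.export_tar as module). They rely on module-level test scaffolding that the excerpts omit: the flexmock and logging imports, the module under test, and the insert_execute_command_mock / insert_logging_mock helpers used by the later examples. A minimal sketch of that scaffolding follows; the exact keyword arguments the real helpers pass through to execute_command are assumptions here, not borgmatic's actual signatures.

import logging

from flexmock import flexmock

from borgmatic.borg import export_tar as module


def insert_execute_command_mock(full_command, output_log_level=logging.INFO, capture=True):
    # Expect execute_command() to be called exactly once with this exact borg
    # command line. The output_log_level/capture keywords are simplified
    # stand-ins for whatever options the real execute_command() accepts.
    flexmock(module).should_receive('execute_command').with_args(
        full_command, output_log_level=output_log_level, capture=capture
    ).once()


def insert_logging_mock(log_level):
    # Pretend the logger is configured at the given verbosity so that the
    # module under test decides whether to add flags like --info.
    logger_mock = flexmock(module.logging.Logger)
    logger_mock.should_receive('isEnabledFor').replace_with(lambda level: level >= log_level)
    logger_mock.should_receive('getEffectiveLevel').replace_with(lambda: log_level)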
Example 2
def test_export_tar_archive_calls_borg_with_lock_wait_parameters():
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(('borg', 'export-tar', '--lock-wait', '5',
                                 'repo::archive', 'test.tar'))

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={'lock_wait': '5'},
    )
Example 3
def test_export_tar_archive_calls_borg_with_stdout_destination_path():
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(('borg', 'export-tar', 'repo::archive', '-'),
                                capture=False)

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='-',
        storage_config={},
    )
Example 4
def test_export_tar_archive_skips_abspath_for_remote_repository_parameter():
    flexmock(module.os.path).should_receive('abspath').never()
    insert_execute_command_mock(
        ('borg', 'export-tar', 'server:repo::archive', 'test.tar'))

    module.export_tar_archive(
        dry_run=False,
        repository='server:repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
    )
Example 5
def test_export_tar_archive_with_log_info_calls_borg_with_info_parameter():
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--info', 'repo::archive', 'test.tar'))
    insert_logging_mock(logging.INFO)

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
    )
Example 6
def test_export_tar_archive_calls_borg_with_strip_components_parameter():
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(('borg', 'export-tar', '--strip-components',
                                 '5', 'repo::archive', 'test.tar'))

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
        strip_components=5,
    )
Example 7
def test_export_tar_archive_calls_borg_with_list_parameter():
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--list', 'repo::archive', 'test.tar'),
        output_log_level=logging.WARNING,
    )

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
        files=True,
    )
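
Taken together, Examples 1 through 7 pin down the behavior these tests expect from export_tar_archive: a dry run never invokes borg, a local repository path is made absolute while a remote server:repo path is left alone, lock_wait and strip_components turn into --lock-wait and --strip-components flags, INFO-level logging adds --info, files=True adds --list and logs borg's output at WARNING, and a destination of '-' streams the tar to stdout without capturing it. The sketch below is one way a function could satisfy those expectations; it is not borgmatic's implementation, which also handles options such as remote_path, umask, and tar_filter, and whose execute_command helper has a different signature.

import logging
import os
import subprocess

logger = logging.getLogger(__name__)


def execute_command(full_command, output_log_level=logging.INFO, capture=True):
    # Stand-in for the subprocess helper that the tests replace with a mock;
    # borgmatic's real execute_command() has a richer signature.
    logger.log(output_log_level, ' '.join(full_command))
    if capture:
        return subprocess.check_output(full_command)
    subprocess.check_call(full_command)


def export_tar_archive(
    dry_run, repository, archive, paths, destination_path, storage_config,
    local_path='borg', strip_components=None, files=False,
):
    # Only local repositories are normalized to absolute paths; a remote
    # "host:repo" value is passed to borg untouched (Example 4).
    if ':' not in repository:
        repository = os.path.abspath(repository)
    lock_wait = storage_config.get('lock_wait')

    full_command = (
        (local_path, 'export-tar')
        + (('--list',) if files else ())
        + (('--info',) if logger.isEnabledFor(logging.INFO) else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--strip-components', str(strip_components)) if strip_components else ())
        + ('{}::{}'.format(repository, archive), destination_path)
        + (tuple(paths) if paths else ())
    )

    # A dry run stops here: Example 1 asserts execute_command() is never called.
    if dry_run:
        return

    execute_command(
        full_command,
        # --list output is logged at WARNING so the file listing stays visible
        # (Example 7); a '-' destination streams the tar to stdout, so don't
        # capture it (Example 3).
        output_log_level=logging.WARNING if files else logging.INFO,
        capture=destination_path != '-',
    )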
Example 8
def run_actions(*, arguments, location, storage, retention, consistency, hooks,
                local_path, remote_path, repository_path):  # pragma: no cover
    '''
    Given parsed command-line arguments as a dict of argparse.Namespace instances keyed by action
    name, several different configuration dicts, local and remote paths to Borg, and a repository
    name, run all actions from the command-line arguments on the given repository.

    Yield the parsed JSON output from any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs while running a command for
    an action. Raise ValueError if the arguments or configuration passed to an action are invalid.
    '''
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
    if 'init' in arguments:
        logger.info('{}: Initializing repository'.format(repository))
        borg_init.initialize_repository(
            repository,
            storage,
            arguments['init'].encryption_mode,
            arguments['init'].append_only,
            arguments['init'].storage_quota,
            local_path=local_path,
            remote_path=remote_path,
        )
    if 'prune' in arguments:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(
            global_arguments.dry_run,
            repository,
            storage,
            retention,
            local_path=local_path,
            remote_path=remote_path,
            stats=arguments['prune'].stats,
            files=arguments['prune'].files,
        )
    if 'create' in arguments:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        dispatch.call_hooks(
            'remove_database_dumps',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        active_dumps = dispatch.call_hooks(
            'dump_databases',
            hooks,
            repository,
            dump.DATABASE_HOOK_NAMES,
            location,
            global_arguments.dry_run,
        )
        stream_processes = [
            process for processes in active_dumps.values()
            for process in processes
        ]

        json_output = borg_create.create_archive(
            global_arguments.dry_run,
            repository,
            location,
            storage,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['create'].progress,
            stats=arguments['create'].stats,
            json=arguments['create'].json,
            files=arguments['create'].files,
            stream_processes=stream_processes,
        )
        if json_output:
            yield json.loads(json_output)

    if 'check' in arguments and checks.repository_enabled_for_checks(
            repository, consistency):
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(
            repository,
            storage,
            consistency,
            local_path=local_path,
            remote_path=remote_path,
            progress=arguments['check'].progress,
            repair=arguments['check'].repair,
            only_checks=arguments['check'].only,
        )
    if 'extract' in arguments:
        if arguments['extract'].repository is None or validate.repositories_match(
                repository, arguments['extract'].repository):
            logger.info('{}: Extracting archive {}'.format(
                repository, arguments['extract'].archive))
            borg_extract.extract_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(repository,
                                               arguments['extract'].archive,
                                               storage, local_path,
                                               remote_path),
                arguments['extract'].paths,
                location,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                destination_path=arguments['extract'].destination,
                strip_components=arguments['extract'].strip_components,
                progress=arguments['extract'].progress,
            )
    if 'export-tar' in arguments:
        if arguments['export-tar'].repository is None or validate.repositories_match(
                repository, arguments['export-tar'].repository):
            logger.info('{}: Exporting archive {} as tar file'.format(
                repository, arguments['export-tar'].archive))
            borg_export_tar.export_tar_archive(
                global_arguments.dry_run,
                repository,
                borg_list.resolve_archive_name(repository,
                                               arguments['export-tar'].archive,
                                               storage, local_path,
                                               remote_path),
                arguments['export-tar'].paths,
                arguments['export-tar'].destination,
                storage,
                local_path=local_path,
                remote_path=remote_path,
                tar_filter=arguments['export-tar'].tar_filter,
                files=arguments['export-tar'].files,
                strip_components=arguments['export-tar'].strip_components,
            )
    if 'mount' in arguments:
        if arguments['mount'].repository is None or validate.repositories_match(
                repository, arguments['mount'].repository):
            if arguments['mount'].archive:
                logger.info('{}: Mounting archive {}'.format(
                    repository, arguments['mount'].archive))
            else:
                logger.info('{}: Mounting repository'.format(repository))

            borg_mount.mount_archive(
                repository,
                borg_list.resolve_archive_name(repository,
                                               arguments['mount'].archive,
                                               storage, local_path,
                                               remote_path),
                arguments['mount'].mount_point,
                arguments['mount'].paths,
                arguments['mount'].foreground,
                arguments['mount'].options,
                storage,
                local_path=local_path,
                remote_path=remote_path,
            )
    if 'restore' in arguments:
        if arguments['restore'].repository is None or validate.repositories_match(
                repository, arguments['restore'].repository):
            logger.info('{}: Restoring databases from archive {}'.format(
                repository, arguments['restore'].archive))
            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            restore_names = arguments['restore'].databases or []
            if 'all' in restore_names:
                restore_names = []

            archive_name = borg_list.resolve_archive_name(
                repository, arguments['restore'].archive, storage, local_path,
                remote_path)
            found_names = set()

            for hook_name, per_hook_restore_databases in hooks.items():
                if hook_name not in dump.DATABASE_HOOK_NAMES:
                    continue

                for restore_database in per_hook_restore_databases:
                    database_name = restore_database['name']
                    if restore_names and database_name not in restore_names:
                        continue

                    found_names.add(database_name)
                    dump_pattern = dispatch.call_hooks(
                        'make_database_dump_pattern',
                        hooks,
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        database_name,
                    )[hook_name]

                    # Kick off a single database extract to stdout.
                    extract_process = borg_extract.extract_archive(
                        dry_run=global_arguments.dry_run,
                        repository=repository,
                        archive=archive_name,
                        paths=dump.convert_glob_patterns_to_borg_patterns(
                            [dump_pattern]),
                        location_config=location,
                        storage_config=storage,
                        local_path=local_path,
                        remote_path=remote_path,
                        destination_path='/',
                        # A directory format dump isn't a single file, and therefore can't extract
                        # to stdout. In this case, the extract_process return value is None.
                        extract_to_stdout=bool(
                            restore_database.get('format') != 'directory'),
                    )

                    # Run a single database restore, consuming the extract stdout (if any).
                    dispatch.call_hooks(
                        'restore_database_dump',
                        {hook_name: [restore_database]},
                        repository,
                        dump.DATABASE_HOOK_NAMES,
                        location,
                        global_arguments.dry_run,
                        extract_process,
                    )

            dispatch.call_hooks(
                'remove_database_dumps',
                hooks,
                repository,
                dump.DATABASE_HOOK_NAMES,
                location,
                global_arguments.dry_run,
            )

            if not restore_names and not found_names:
                raise ValueError('No databases were found to restore')

            missing_names = sorted(set(restore_names) - found_names)
            if missing_names:
                raise ValueError(
                    'Cannot restore database(s) {} missing from borgmatic\'s configuration'
                    .format(', '.join(missing_names)))

    if 'list' in arguments:
        if arguments['list'].repository is None or validate.repositories_match(
                repository, arguments['list'].repository):
            list_arguments = copy.copy(arguments['list'])
            if not list_arguments.json:
                logger.warning('{}: Listing archives'.format(repository))
            list_arguments.archive = borg_list.resolve_archive_name(
                repository, list_arguments.archive, storage, local_path,
                remote_path)
            json_output = borg_list.list_archives(
                repository,
                storage,
                list_arguments=list_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
    if 'info' in arguments:
        if arguments['info'].repository is None or validate.repositories_match(
                repository, arguments['info'].repository):
            info_arguments = copy.copy(arguments['info'])
            if not info_arguments.json:
                logger.warning(
                    '{}: Displaying summary info for archives'.format(
                        repository))
            info_arguments.archive = borg_list.resolve_archive_name(
                repository, info_arguments.archive, storage, local_path,
                remote_path)
            json_output = borg_info.display_archives_info(
                repository,
                storage,
                info_arguments=info_arguments,
                local_path=local_path,
                remote_path=remote_path,
            )
            if json_output:
                yield json.loads(json_output)
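
As its docstring notes, run_actions is a generator, so a caller has to iterate it for any of the actions to run and to collect the parsed JSON yielded by the create, list, and info actions. A minimal, hypothetical driver might look like the following; parsed_arguments, the *_config dicts, and the repository path are placeholders for values produced by borgmatic's argument and configuration parsing, not real borgmatic API objects.

import json

# Hypothetical driver: drain the generator so each configured action runs,
# collecting any parsed JSON it yields (roughly what a --json invocation does).
json_results = list(
    run_actions(
        arguments=parsed_arguments,
        location=location_config,
        storage=storage_config,
        retention=retention_config,
        consistency=consistency_config,
        hooks=hooks_config,
        local_path='borg',
        remote_path=None,
        repository_path='/mnt/backups/repo.borg',
    )
)

if json_results:
    print(json.dumps(json_results))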