示例#1
0
def list_archives(repository,
                  storage_config,
                  list_arguments,
                  local_path='borg',
                  remote_path=None):
    '''
    List the Borg archives in the given local or remote repository (or, when an archive name
    is given, the files inside that archive), honoring the storage config dict and the list
    action arguments. Display the output, or return JSON output when requested.
    '''
    lock_wait = storage_config.get('lock_wait', None)
    if list_arguments.successful:
        list_arguments.glob_archives = BORG_EXCLUDE_CHECKPOINTS_GLOB

    # Suppress verbosity flags in JSON mode so extra output doesn't corrupt the JSON stream.
    verbosity_flags = ()
    if not list_arguments.json:
        if logger.isEnabledFor(logging.DEBUG):
            verbosity_flags = ('--debug', '--show-rc')
        elif logger.getEffectiveLevel() == logging.INFO:
            verbosity_flags = ('--info',)

    if list_arguments.archive:
        repository_spec = '::'.join((repository, list_arguments.archive))
    else:
        repository_spec = repository

    full_command = (
        (local_path, 'list')
        + verbosity_flags
        + make_flags('remote-path', remote_path)
        + make_flags('lock-wait', lock_wait)
        + make_flags_from_arguments(
            list_arguments, excludes=('repository', 'archive', 'successful'))
        + (repository_spec,)
    )

    return execute_command(
        full_command,
        output_log_level=None if list_arguments.json else logging.WARNING)
示例#2
0
def display_archives_info(repository,
                          storage_config,
                          info_arguments,
                          local_path='borg',
                          remote_path=None):
    '''
    Show summary information for the Borg archives in the given local or remote repository,
    honoring the storage config dict and the info action arguments. Display the output, or
    return JSON summary information when requested.
    '''
    lock_wait = storage_config.get('lock_wait', None)

    # Suppress verbosity flags in JSON mode so extra output doesn't corrupt the JSON stream.
    verbosity_flags = ()
    if not info_arguments.json:
        if logger.isEnabledFor(logging.DEBUG):
            verbosity_flags = ('--debug', '--show-rc')
        elif logger.getEffectiveLevel() == logging.INFO:
            verbosity_flags = ('--info',)

    if info_arguments.archive:
        repository_spec = '::'.join((repository, info_arguments.archive))
    else:
        repository_spec = repository

    full_command = (
        (local_path, 'info')
        + verbosity_flags
        + make_flags('remote-path', remote_path)
        + make_flags('lock-wait', lock_wait)
        + make_flags_from_arguments(info_arguments, excludes=('repository', 'archive'))
        + (repository_spec,)
    )

    return execute_command(
        full_command,
        output_log_level=None if info_arguments.json else logging.WARNING,
        borg_local_path=local_path,
    )
示例#3
0
File: init.py  Project: wellic/borgmatic
def initialize_repository(
    repository,
    storage_config,
    encryption_mode,
    append_only=None,
    storage_quota=None,
    local_path='borg',
    remote_path=None,
):
    '''
    Initialize the given local or remote repository with the given Borg encryption mode,
    optionally append-only and with a storage quota, honoring the storage configuration
    dict. If the repository already exists, then log and skip initialization.
    '''
    # Verbosity flags are mutually exclusive: --info at INFO level, --debug at DEBUG.
    verbosity_flags = ()
    if logger.getEffectiveLevel() == logging.INFO:
        verbosity_flags = ('--info',)
    if logger.isEnabledFor(logging.DEBUG):
        verbosity_flags = ('--debug',)
    remote_path_flags = ('--remote-path', remote_path) if remote_path else ()

    info_command = (local_path, 'info') + verbosity_flags + remote_path_flags + (repository,)
    logger.debug(' '.join(info_command))

    try:
        execute_command(info_command, output_log_level=None)
        logger.info('Repository already exists. Skipping initialization.')
        return
    except subprocess.CalledProcessError as error:
        # Only a "repository not found" exit code means we should proceed to initialize.
        if error.returncode != INFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
            raise

    extra_borg_options = storage_config.get('extra_borg_options', {}).get('init', '')

    init_flags = []
    if encryption_mode:
        init_flags.extend(('--encryption', encryption_mode))
    if append_only:
        init_flags.append('--append-only')
    if storage_quota:
        init_flags.extend(('--storage-quota', storage_quota))

    init_command = (
        (local_path, 'init')
        + tuple(init_flags)
        + verbosity_flags
        + remote_path_flags
        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
        + (repository,)
    )

    # Don't use execute_command() here because it doesn't support interactive prompts.
    execute_command_without_capture(init_command, error_on_warnings=False)
示例#4
0
def test_execute_command_calls_full_command():
    command = ['foo', 'bar']
    flexmock(module).should_receive('execute_and_log_output').with_args(
        command, output_log_level=logging.INFO, shell=False
    ).once()

    assert module.execute_command(command) is None
示例#5
0
def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg', remote_path=None):
    '''
    Dry-run extract the most recent archive in the given repository. If the repository
    contains no archives, skip the dry-run entirely.
    '''
    # Flags shared by both the list and extract invocations.
    common_flags = ()
    if remote_path:
        common_flags += ('--remote-path', remote_path)
    if lock_wait:
        common_flags += ('--lock-wait', str(lock_wait))
    if logger.isEnabledFor(logging.DEBUG):
        common_flags += ('--debug', '--show-rc')
    elif logger.isEnabledFor(logging.INFO):
        common_flags += ('--info',)

    list_output = execute_command(
        (local_path, 'list', '--short') + common_flags + (repository,),
        output_log_level=None,
        borg_local_path=local_path,
    )

    archive_names = list_output.strip().splitlines()
    if not archive_names:
        return
    last_archive_name = archive_names[-1]

    list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
    full_extract_command = (
        (local_path, 'extract', '--dry-run')
        + common_flags
        + list_flag
        + ('{repository}::{last_archive_name}'.format(
            repository=repository, last_archive_name=last_archive_name
        ),)
    )

    execute_command(full_extract_command, working_directory=None)
示例#6
0
def test_execute_command_calls_full_command_with_extra_environment():
    command = ['foo', 'bar']
    flexmock(module.os, environ={'a': 'b'})
    flexmock(module).should_receive('execute_and_log_output').with_args(
        command,
        output_log_level=logging.INFO,
        shell=False,
        environment={'a': 'b', 'c': 'd'},
    ).once()

    assert module.execute_command(command, extra_environment={'c': 'd'}) is None
示例#7
0
def check_archives(
    repository,
    storage_config,
    consistency_config,
    local_path='borg',
    remote_path=None,
    only_checks=None,
):
    '''
    Check the Borg archives in the given local or remote repository for consistency, using
    the given storage config dict, consistency config dict, local/remote Borg paths, and an
    optional list of checks to use instead of the configured ones.

    If there are no consistency checks to run, skip running them.
    '''
    checks = _parse_checks(consistency_config, only_checks)
    check_last = consistency_config.get('check_last', None)
    lock_wait = None

    if set(checks) & set(DEFAULT_CHECKS + ('data',)):
        option_flags = []
        if remote_path:
            option_flags.extend(('--remote-path', remote_path))
        lock_wait = storage_config.get('lock_wait', None)
        if lock_wait:
            option_flags.extend(('--lock-wait', str(lock_wait)))
        # --debug/--show-rc supersedes --info when DEBUG logging is enabled.
        if logger.isEnabledFor(logging.DEBUG):
            option_flags.extend(('--debug', '--show-rc'))
        elif logger.isEnabledFor(logging.INFO):
            option_flags.append('--info')

        prefix = consistency_config.get('prefix', DEFAULT_PREFIX)

        execute_command(
            (local_path, 'check')
            + _make_check_flags(checks, check_last, prefix)
            + tuple(option_flags)
            + (repository,)
        )

    if 'extract' in checks:
        extract.extract_last_archive_dry_run(repository, lock_wait, local_path,
                                             remote_path)
示例#8
0
def test_execute_command_captures_output():
    command = ['foo', 'bar']
    expected = '[]'
    flexmock(module.subprocess).should_receive('check_output').with_args(
        command, stderr=module.subprocess.STDOUT, shell=False
    ).and_return(flexmock(decode=lambda: expected)).once()

    assert module.execute_command(command, output_log_level=None) == expected
示例#9
0
def execute_hook(commands, umask, config_filename, description, dry_run):
    '''
    Given a list of hook commands to execute, a umask to execute with (or None), a config filename,
    a hook description, and whether this is a dry run, run the given commands. Or, don't run them
    if this is a dry run.

    Raise ValueError if the umask cannot be parsed.
    '''
    if not commands:
        logger.debug('{}: No commands to run for {} hook'.format(
            config_filename, description))
        return

    dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''

    if len(commands) == 1:
        logger.info('{}: Running command for {} hook{}'.format(
            config_filename, description, dry_run_label))
    else:
        logger.info('{}: Running {} commands for {} hook{}'.format(
            config_filename, len(commands), description, dry_run_label))

    if umask:
        # The configured umask is octal notation (e.g. "077"), so parse it base-8.
        parsed_umask = int(str(umask), 8)
        logger.debug('{}: Set hook umask to {}'.format(config_filename,
                                                       oct(parsed_umask)))
        original_umask = os.umask(parsed_umask)
    else:
        original_umask = None

    try:
        for command in commands:
            if not dry_run:
                # Surface on-error hook output at ERROR so it's visible even at low verbosity.
                execute.execute_command(
                    [command],
                    output_log_level=logging.ERROR
                    if description == 'on-error' else logging.WARNING,
                    shell=True,
                )
    finally:
        # Compare against None explicitly: os.umask() can legitimately return 0 (falsy),
        # and the previous "if original_umask:" check failed to restore a umask of 0,
        # leaving the hook umask permanently in effect.
        if original_umask is not None:
            os.umask(original_umask)
示例#10
0
def test_execute_command_captures_output_with_shell():
    command = ['foo', 'bar']
    expected = '[]'
    flexmock(module.os, environ={'a': 'b'})
    flexmock(module.subprocess).should_receive('check_output').with_args(
        command, shell=True, env=None
    ).and_return(flexmock(decode=lambda: expected)).once()

    assert module.execute_command(command, output_log_level=None, shell=True) == expected
示例#11
0
def initialize_repository(
    repository,
    encryption_mode,
    append_only=None,
    storage_quota=None,
    local_path='borg',
    remote_path=None,
):
    '''
    Initialize the given local or remote repository with the given Borg encryption mode,
    optionally append-only and with a storage quota. If the repository already exists,
    then log and skip initialization.
    '''
    info_command = (local_path, 'info', repository)
    logger.debug(' '.join(info_command))

    try:
        execute_command(info_command, output_log_level=None)
        logger.info('Repository already exists. Skipping initialization.')
        return
    except subprocess.CalledProcessError as error:
        # Only a "repository not found" exit code means we should proceed to initialize.
        if error.returncode != INFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
            raise

    init_flags = []
    if encryption_mode:
        init_flags.extend(('--encryption', encryption_mode))
    if append_only:
        init_flags.append('--append-only')
    if storage_quota:
        init_flags.extend(('--storage-quota', storage_quota))
    if logger.getEffectiveLevel() == logging.INFO:
        init_flags.append('--info')
    if logger.isEnabledFor(logging.DEBUG):
        init_flags.append('--debug')
    if remote_path:
        init_flags.extend(('--remote-path', remote_path))

    init_command = (local_path, 'init') + tuple(init_flags) + (repository,)

    # Don't use execute_command() here because it doesn't support interactive prompts.
    try:
        subprocess.check_call(init_command)
    except subprocess.CalledProcessError as error:
        # Borg exits non-zero for mere warnings; only treat real errors as fatal.
        if error.returncode >= BORG_ERROR_EXIT_CODE:
            raise
示例#12
0
def dump_databases(databases, config_filename, dry_run):
    '''
    Dump the given PostgreSQL databases to disk. The databases are supplied as a sequence of dicts,
    one dict describing each database as per the configuration schema. Use the given configuration
    filename in any log entries. If this is a dry run, then don't actually dump anything.
    '''
    if not databases:
        logger.debug('{}: No PostgreSQL databases configured'.format(config_filename))
        return

    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

    logger.info('{}: Dumping PostgreSQL databases{}'.format(config_filename, dry_run_label))

    for database in databases:
        if os.path.sep in database['name']:
            raise ValueError('Invalid database name {}'.format(database['name']))

        dump_path = os.path.join(
            os.path.expanduser(DUMP_PATH), database.get('hostname', 'localhost')
        )
        name = database['name']
        all_databases = bool(name == 'all')

        # Assemble the dump command, adding each flag only when configured.
        command_parts = [
            'pg_dumpall' if all_databases else 'pg_dump',
            '--no-password',
            '--clean',
            '--file',
            os.path.join(dump_path, name),
        ]
        if 'hostname' in database:
            command_parts.extend(('--host', database['hostname']))
        if 'port' in database:
            command_parts.extend(('--port', str(database['port'])))
        if 'username' in database:
            command_parts.extend(('--username', database['username']))
        if not all_databases:
            command_parts.extend(('--format', database.get('format', 'custom')))
        if 'options' in database:
            command_parts.extend(database['options'].split(' '))
        if not all_databases:
            command_parts.append(name)

        # Pass the password via the environment so it doesn't appear in the process list.
        extra_environment = {'PGPASSWORD': database['password']} if 'password' in database else None

        logger.debug(
            '{}: Dumping PostgreSQL database {}{}'.format(config_filename, name, dry_run_label)
        )
        if dry_run:
            continue

        os.makedirs(dump_path, mode=0o700, exist_ok=True)
        execute_command(tuple(command_parts), extra_environment=extra_environment)
示例#13
0
def test_execute_command_calls_full_command_without_capturing_output():
    command = ['foo', 'bar']
    flexmock(module.os, environ={'a': 'b'})
    flexmock(module.subprocess).should_receive('Popen').with_args(
        command, stdin=None, stdout=None, stderr=None, shell=False, env=None, cwd=None
    ).and_return(flexmock(wait=lambda: 0)).once()
    flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
    flexmock(module).should_receive('log_outputs')

    assert module.execute_command(command, output_file=module.DO_NOT_CAPTURE) is None
示例#14
0
def extract_archive(
    dry_run,
    repository,
    archive,
    restore_paths,
    location_config,
    storage_config,
    local_path='borg',
    remote_path=None,
    progress=False,
):
    '''
    Extract the given archive from a local or remote repository into the current directory,
    optionally limited to zero or more restore paths, honoring the location/storage
    configuration dicts and the dry-run and progress flags.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)

    option_flags = []
    if remote_path:
        option_flags.extend(('--remote-path', remote_path))
    if location_config.get('numeric_owner'):
        option_flags.append('--numeric-owner')
    if umask:
        option_flags.extend(('--umask', str(umask)))
    if lock_wait:
        option_flags.extend(('--lock-wait', str(lock_wait)))
    if logger.getEffectiveLevel() == logging.INFO:
        option_flags.append('--info')
    if logger.isEnabledFor(logging.DEBUG):
        option_flags.extend(('--debug', '--list', '--show-rc'))
    if dry_run:
        option_flags.append('--dry-run')
    if progress:
        option_flags.append('--progress')

    full_command = (
        (local_path, 'extract')
        + tuple(option_flags)
        + ('::'.join((repository, archive)),)
        + (tuple(restore_paths) if restore_paths else ())
    )

    # The progress output isn't compatible with captured and logged output, as progress messes with
    # the terminal directly.
    if progress:
        execute_command_without_capture(full_command)
        return

    execute_command(full_command)
示例#15
0
def prune_archives(
    dry_run,
    repository,
    storage_config,
    retention_config,
    local_path='borg',
    remote_path=None,
    stats=False,
    files=False,
):
    '''
    Prune Borg archives in the given local or remote repository according to the retention
    policy specified in the given retention config dict, honoring the storage config dict
    and the dry-run, stats, and files flags.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')

    retention_flags = tuple(
        element for pair in _make_prune_flags(retention_config) for element in pair
    )

    option_flags = []
    if remote_path:
        option_flags.extend(('--remote-path', remote_path))
    if umask:
        option_flags.extend(('--umask', str(umask)))
    if lock_wait:
        option_flags.extend(('--lock-wait', str(lock_wait)))
    if stats and not dry_run:
        option_flags.append('--stats')
    if logger.getEffectiveLevel() == logging.INFO:
        option_flags.append('--info')
    if files:
        option_flags.append('--list')
    if logger.isEnabledFor(logging.DEBUG):
        option_flags.extend(('--debug', '--show-rc'))
    if dry_run:
        option_flags.append('--dry-run')
    if extra_borg_options:
        option_flags.extend(extra_borg_options.split(' '))

    full_command = (local_path, 'prune') + retention_flags + tuple(option_flags) + (repository,)

    # Requested stats/files output would otherwise be hidden at WARNING verbosity,
    # so log it at WARNING in that case.
    if (stats or files) and logger.getEffectiveLevel() == logging.WARNING:
        output_log_level = logging.WARNING
    else:
        output_log_level = logging.INFO

    execute_command(full_command, output_log_level=output_log_level, borg_local_path=local_path)
示例#16
0
def mount_archive(
    repository,
    archive,
    mount_point,
    paths,
    foreground,
    options,
    storage_config,
    local_path='borg',
    remote_path=None,
):
    '''
    Mount the given archive (or the whole repository when no archive name is given) from a
    local or remote repository onto the given filesystem mount point, optionally restricted
    to zero or more paths, honoring the foreground flag, extra Borg mount options, a storage
    configuration dict, and optional local and remote Borg paths.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)

    option_flags = []
    if remote_path:
        option_flags.extend(('--remote-path', remote_path))
    if umask:
        option_flags.extend(('--umask', str(umask)))
    if lock_wait:
        option_flags.extend(('--lock-wait', str(lock_wait)))
    if logger.getEffectiveLevel() == logging.INFO:
        option_flags.append('--info')
    if logger.isEnabledFor(logging.DEBUG):
        option_flags.extend(('--debug', '--show-rc'))
    if foreground:
        option_flags.append('--foreground')
    if options:
        option_flags.extend(('-o', options))

    repository_spec = '::'.join((repository, archive)) if archive else repository
    full_command = (
        (local_path, 'mount')
        + tuple(option_flags)
        + (repository_spec, mount_point)
        + (tuple(paths) if paths else ())
    )

    # Don't capture the output when foreground mode is used so that ctrl-C can work properly.
    if foreground:
        execute_command(full_command,
                        output_file=DO_NOT_CAPTURE,
                        borg_local_path=local_path)
        return

    execute_command(full_command, borg_local_path=local_path)
示例#17
0
File: mysql.py  Project: wellic/borgmatic
def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given MySQL/MariaDB databases to disk. The databases are supplied as a sequence of
    dicts, one dict describing each database as per the configuration schema. Use the given log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path. If this is a dry run, then don't actually dump anything.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

    logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))

    for database in databases:
        name = database['name']
        dump_filename = dump.make_database_dump_filename(
            make_dump_path(location_config), name, database.get('hostname')
        )
        command = (
            ('mysqldump', '--add-drop-database')
            + (('--host', database['hostname']) if 'hostname' in database else ())
            + (('--port', str(database['port'])) if 'port' in database else ())
            + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
            + (('--user', database['username']) if 'username' in database else ())
            + (tuple(database['options'].split(' ')) if 'options' in database else ())
            + (('--all-databases',) if name == 'all' else ('--databases', name))
        )
        # Pass the password via the environment so it doesn't appear in the process list.
        extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None

        logger.debug(
            '{}: Dumping MySQL database {} to {}{}'.format(
                log_prefix, name, dump_filename, dry_run_label
            )
        )
        if not dry_run:
            os.makedirs(os.path.dirname(dump_filename), mode=0o700, exist_ok=True)
            # Open the dump file in a context manager so the handle is closed (and buffered
            # output flushed) even if execute_command() raises. The original passed
            # open(dump_filename, 'w') inline and leaked the file handle.
            with open(dump_filename, 'w') as dump_file:
                execute_command(
                    command, output_file=dump_file, extra_environment=extra_environment
                )
示例#18
0
def test_execute_command_without_run_to_completion_returns_process():
    command = ['foo', 'bar']
    expected_process = flexmock()
    flexmock(module.os, environ={'a': 'b'})
    flexmock(module.subprocess).should_receive('Popen').with_args(
        command,
        stdin=None,
        stdout=module.subprocess.PIPE,
        stderr=module.subprocess.STDOUT,
        shell=False,
        env=None,
        cwd=None,
    ).and_return(expected_process).once()
    flexmock(module).should_receive('log_outputs')

    assert module.execute_command(command, run_to_completion=False) == expected_process
示例#19
0
def test_execute_command_calls_full_command():
    command = ['foo', 'bar']
    flexmock(module.os, environ={'a': 'b'})
    flexmock(module.subprocess).should_receive('Popen').with_args(
        command,
        stdin=None,
        stdout=module.subprocess.PIPE,
        stderr=module.subprocess.STDOUT,
        shell=False,
        env=None,
        cwd=None,
    ).and_return(flexmock(stdout=None)).once()
    flexmock(module).should_receive('log_outputs')

    assert module.execute_command(command) is None
示例#20
0
def resolve_archive_name(repository,
                         archive,
                         storage_config,
                         local_path='borg',
                         remote_path=None):
    '''
    Return the given archive name as-is — unless it is "latest", in which case introspect
    the given local or remote repository (honoring the storage config dict and local/remote
    Borg paths) for the latest successful (non-checkpoint) archive and return its name.

    Raise ValueError if "latest" is given but there are no archives in the repository.
    '''
    if archive != "latest":
        return archive

    lock_wait = storage_config.get('lock_wait', None)

    verbosity_flags = ()
    if logger.isEnabledFor(logging.DEBUG):
        verbosity_flags = ('--debug', '--show-rc')
    elif logger.getEffectiveLevel() == logging.INFO:
        verbosity_flags = ('--info',)

    full_command = (
        (local_path, 'list')
        + verbosity_flags
        + make_flags('remote-path', remote_path)
        + make_flags('lock-wait', lock_wait)
        + make_flags('glob-archives', BORG_EXCLUDE_CHECKPOINTS_GLOB)
        + make_flags('last', 1)
        + ('--short', repository)
    )

    output = execute_command(full_command,
                             output_log_level=None,
                             error_on_warnings=False)
    archive_lines = output.strip().splitlines()
    if not archive_lines:
        raise ValueError('No archives found in the repository')
    latest_archive = archive_lines[-1]

    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))

    return latest_archive
示例#21
0
def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a
    sequence of dicts, one dict describing each database as per the configuration schema.
    Use the given log prefix in any log entries and the given location configuration dict
    to construct the destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to
    a named pipe. But if this is a dry run, then don't actually dump anything and return an
    empty sequence.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))

    for database in databases:
        requested_name = database['name']
        dump_filename = dump.make_database_dump_filename(
            make_dump_path(location_config), requested_name,
            database.get('hostname'))
        # Pass the password via the environment so it doesn't appear in the process list.
        extra_environment = (
            {'MYSQL_PWD': database['password']} if 'password' in database else None
        )
        dump_database_names = database_names_to_dump(
            database, extra_environment, log_prefix, dry_run_label
        )
        if not dump_database_names:
            raise ValueError('Cannot find any MySQL databases to dump.')

        connection_flags = []
        if 'hostname' in database:
            connection_flags.extend(('--host', database['hostname']))
        if 'port' in database:
            connection_flags.extend(('--port', str(database['port'])))
        if 'hostname' in database or 'port' in database:
            connection_flags.extend(('--protocol', 'tcp'))
        if 'username' in database:
            connection_flags.extend(('--user', database['username']))
        if 'options' in database:
            connection_flags.extend(database['options'].split(' '))

        dump_command = (
            ('mysqldump', '--add-drop-database')
            + tuple(connection_flags)
            + ('--databases',)
            + dump_database_names
            # Use shell redirection rather than execute_command(output_file=open(...)) to prevent
            # the open() call on a named pipe from hanging the main borgmatic process.
            + ('>', dump_filename)
        )

        logger.debug('{}: Dumping MySQL database {} to {}{}'.format(
            log_prefix, requested_name, dump_filename, dry_run_label))
        if dry_run:
            continue

        dump.create_named_pipe_for_dump(dump_filename)

        processes.append(
            execute_command(
                dump_command,
                shell=True,
                extra_environment=extra_environment,
                run_to_completion=False,
            ))

    return processes
示例#22
0
def create_archive(
    dry_run,
    repository,
    location_config,
    storage_config,
    local_path='borg',
    remote_path=None,
    progress=False,
    stats=False,
    json=False,
    files=False,
    stream_processes=None,
):
    '''
    Given verbosity/dry-run flags, a local or remote repository path, a location config dict, and a
    storage config dict, create a Borg archive and return Borg's JSON output (if any).

    If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
    create command while also triggering the given processes to produce output.
    '''
    # Assemble the source directories: configured sources plus borgmatic's own source
    # directories, expanded and then deduplicated (presumably by device mapping — the
    # helpers are defined elsewhere in this module; confirm there).
    sources = deduplicate_directories(
        map_directories_to_devices(
            _expand_directories(
                location_config['source_directories'] +
                borgmatic_source_directories(
                    location_config.get('borgmatic_source_directory')))))

    # Write any configured patterns/excludes to temporary files for passing to Borg.
    pattern_file = _write_pattern_file(location_config.get('patterns'))
    exclude_file = _write_pattern_file(
        _expand_home_directories(location_config.get('exclude_patterns')))
    checkpoint_interval = storage_config.get('checkpoint_interval', None)
    chunker_params = storage_config.get('chunker_params', None)
    compression = storage_config.get('compression', None)
    remote_rate_limit = storage_config.get('remote_rate_limit', None)
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    files_cache = location_config.get('files_cache')
    archive_name_format = storage_config.get('archive_name_format',
                                             DEFAULT_ARCHIVE_NAME_FORMAT)
    extra_borg_options = storage_config.get('extra_borg_options',
                                            {}).get('create', '')

    # Build the full "borg create" command, adding each flag only when the corresponding
    # option calls for it. Note that --one-file-system and --read-special are forced on
    # when stream processes feed named pipes, and that verbosity/stats flags are suppressed
    # in JSON mode so extra output doesn't corrupt the JSON stream.
    full_command = (
        (local_path, 'create') + _make_pattern_flags(
            location_config, pattern_file.name if pattern_file else None) +
        _make_exclude_flags(location_config,
                            exclude_file.name if exclude_file else None) +
        (('--checkpoint-interval',
          str(checkpoint_interval)) if checkpoint_interval else
         ()) + (('--chunker-params', chunker_params) if chunker_params else
                ()) + (('--compression', compression) if compression else ()) +
        (('--remote-ratelimit',
          str(remote_rate_limit)) if remote_rate_limit else
         ()) + (('--one-file-system', ) if
                location_config.get('one_file_system') or stream_processes else
                ()) +
        (('--numeric-owner', ) if location_config.get('numeric_owner') else
         ()) + (('--noatime', ) if location_config.get('atime') is False else
                ()) +
        (('--noctime', ) if location_config.get('ctime') is False else ()) +
        (('--nobirthtime', ) if location_config.get('birthtime') is False else
         ()) + (('--read-special', ) if
                (location_config.get('read_special') or stream_processes) else
                ()) +
        (('--nobsdflags', ) if location_config.get('bsd_flags') is False else
         ()) + (('--files-cache', files_cache) if files_cache else
                ()) + (('--remote-path', remote_path) if remote_path else
                       ()) + (('--umask', str(umask)) if umask else ()) +
        (('--lock-wait', str(lock_wait)) if lock_wait else
         ()) + (('--list', '--filter',
                 'AME-') if files and not json and not progress else ()) +
        (('--info', )
         if logger.getEffectiveLevel() == logging.INFO and not json else
         ()) + (('--stats', ) if stats and not json and not dry_run else ()) +
        (('--debug',
          '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else
         ()) + (('--dry-run', ) if dry_run else
                ()) + (('--progress', ) if progress else ()) +
        (('--json', ) if json else
         ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else
                ()) + ('{repository}::{archive_name_format}'.format(
                    repository=repository,
                    archive_name_format=archive_name_format), ) + sources)

    # JSON output must be captured (not logged). Requested stats/files output would
    # otherwise be hidden at WARNING verbosity, so log it at WARNING in that case.
    if json:
        output_log_level = None
    elif (stats or files) and logger.getEffectiveLevel() == logging.WARNING:
        output_log_level = logging.WARNING
    else:
        output_log_level = logging.INFO

    # The progress output isn't compatible with captured and logged output, as progress messes with
    # the terminal directly.
    output_file = DO_NOT_CAPTURE if progress else None

    # When stream processes are feeding named pipes, use the execute variant that services
    # those processes while Borg runs.
    if stream_processes:
        return execute_command_with_processes(
            full_command,
            stream_processes,
            output_log_level,
            output_file,
            borg_local_path=local_path,
        )

    return execute_command(full_command,
                           output_log_level,
                           output_file,
                           borg_local_path=local_path)
示例#23
0
def create_archive(
    dry_run,
    repository,
    location_config,
    storage_config,
    local_path='borg',
    remote_path=None,
    progress=False,
    stats=False,
    json=False,
):
    '''
    Given verbosity/dry-run flags, a local or remote repository path, a location config dict, and a
    storage config dict, create a Borg archive and return Borg's JSON output (if any).
    '''
    sources = _expand_directories(location_config['source_directories'])

    # These temporary files must stay alive until the Borg command has run, since Borg reads
    # pattern/exclude entries from them by filename.
    pattern_file = _write_pattern_file(location_config.get('patterns'))
    exclude_file = _write_pattern_file(
        _expand_home_directories(location_config.get('exclude_patterns')))

    checkpoint_interval = storage_config.get('checkpoint_interval', None)
    chunker_params = storage_config.get('chunker_params', None)
    compression = storage_config.get('compression', None)
    remote_rate_limit = storage_config.get('remote_rate_limit', None)
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    files_cache = location_config.get('files_cache')
    archive_name_format = storage_config.get(
        'archive_name_format', '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}')

    info_enabled = logger.isEnabledFor(logging.INFO)
    debug_enabled = logger.isEnabledFor(logging.DEBUG)
    at_info_level = logger.getEffectiveLevel() == logging.INFO

    # Assemble the command as an ordered list of flag groups, then flatten. The order mirrors
    # the historical command layout so the resulting tuple is unchanged.
    flag_groups = [
        (local_path, 'create'),
        _make_pattern_flags(location_config,
                            pattern_file.name if pattern_file else None),
        _make_exclude_flags(location_config,
                            exclude_file.name if exclude_file else None),
    ]
    if checkpoint_interval:
        flag_groups.append(('--checkpoint-interval', str(checkpoint_interval)))
    if chunker_params:
        flag_groups.append(('--chunker-params', chunker_params))
    if compression:
        flag_groups.append(('--compression', compression))
    if remote_rate_limit:
        flag_groups.append(('--remote-ratelimit', str(remote_rate_limit)))
    if location_config.get('one_file_system'):
        flag_groups.append(('--one-file-system',))
    if location_config.get('numeric_owner'):
        flag_groups.append(('--numeric-owner',))
    # Timestamps/flags are stored by default; pass the corresponding "no" flag only when the
    # option is explicitly disabled (False) in the configuration, not merely absent.
    if location_config.get('atime') is False:
        flag_groups.append(('--noatime',))
    if location_config.get('ctime') is False:
        flag_groups.append(('--noctime',))
    if location_config.get('birthtime') is False:
        flag_groups.append(('--nobirthtime',))
    if location_config.get('read_special'):
        flag_groups.append(('--read-special',))
    if location_config.get('bsd_flags') is False:
        flag_groups.append(('--nobsdflags',))
    if files_cache:
        flag_groups.append(('--files-cache', files_cache))
    if remote_path:
        flag_groups.append(('--remote-path', remote_path))
    if umask:
        flag_groups.append(('--umask', str(umask)))
    if lock_wait:
        flag_groups.append(('--lock-wait', str(lock_wait)))
    # Verbosity-related flags are suppressed in JSON mode so Borg's JSON output stays parseable.
    if info_enabled and not json:
        flag_groups.append(('--list', '--filter', 'AME-'))
    if at_info_level and not json:
        flag_groups.append(('--info',))
    if not dry_run and (info_enabled or stats) and not json:
        flag_groups.append(('--stats',))
    if debug_enabled and not json:
        flag_groups.append(('--debug', '--show-rc'))
    if dry_run:
        flag_groups.append(('--dry-run',))
    if progress:
        flag_groups.append(('--progress',))
    if json:
        flag_groups.append(('--json',))
    flag_groups.append(('{repository}::{archive_name_format}'.format(
        repository=repository, archive_name_format=archive_name_format),))

    full_command = sum(flag_groups, ()) + sources

    # JSON output is returned rather than logged; stats are surfaced even at WARNING verbosity.
    output_log_level = (None if json else
                        logging.WARNING if stats else logging.INFO)

    return execute_command(full_command, output_log_level)
示例#24
0
def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
    dicts, one dict describing each database as per the configuration schema. Use the given log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
    pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info('{}: Dumping PostgreSQL databases{}'.format(
        log_prefix, dry_run_label))

    for database in databases:
        name = database['name']
        dump_filename = dump.make_database_dump_filename(
            make_dump_path(location_config), name, database.get('hostname'))
        # The special database name "all" selects pg_dumpall, which dumps every database in the
        # cluster and doesn't accept --format or a database-name argument.
        all_databases = name == 'all'
        dump_format = database.get('format', 'custom')
        command = (
            (
                'pg_dumpall' if all_databases else 'pg_dump',
                '--no-password',
                '--clean',
                '--if-exists',
            ) +
            (('--host', database['hostname']) if 'hostname' in database else
             ()) +
            (('--port', str(database['port'])) if 'port' in database else
             ()) + (('--username',
                     database['username']) if 'username' in database else
                    ()) + (() if all_databases else
                           ('--format', dump_format)) +
            (('--file', dump_filename) if dump_format == 'directory' else
             ()) + (tuple(database['options'].split(' ')) if 'options'
                    in database else ()) + (() if all_databases else (name, ))
            # Use shell redirection rather than the --file flag to sidestep synchronization issues
            # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
            # format in particular, a named destination is required, and redirection doesn't work.
            + (('>', dump_filename) if dump_format != 'directory' else ()))
        # Pass the password via the environment rather than the command line, so it doesn't show
        # up in process listings.
        extra_environment = {
            'PGPASSWORD': database['password']
        } if 'password' in database else None

        logger.debug('{}: Dumping PostgreSQL database {} to {}{}'.format(
            log_prefix, name, dump_filename, dry_run_label))
        if dry_run:
            continue

        # The directory format writes multiple files into a real directory; every other format
        # streams through a named pipe that the backup process reads from.
        if dump_format == 'directory':
            dump.create_parent_directory_for_dump(dump_filename)
        else:
            dump.create_named_pipe_for_dump(dump_filename)

        processes.append(
            execute_command(command,
                            shell=True,
                            extra_environment=extra_environment,
                            run_to_completion=False))

    return processes
示例#25
0
def extract_archive(
    dry_run,
    repository,
    archive,
    paths,
    location_config,
    storage_config,
    local_path='borg',
    remote_path=None,
    destination_path=None,
    progress=False,
    extract_to_stdout=False,
):
    '''
    Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
    restore from the archive, location/storage configuration dicts, optional local and remote Borg
    paths, and an optional destination path to extract to, extract the archive into the current
    directory.

    If extract to stdout is True, then start the extraction streaming to stdout, and return that
    extract process as an instance of subprocess.Popen.

    Raise ValueError if progress and extract_to_stdout are both set, as progress output would
    corrupt the extracted data on stdout.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)

    if progress and extract_to_stdout:
        raise ValueError('progress and extract_to_stdout cannot both be set')

    full_command = (
        (local_path, 'extract')
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--numeric-owner',) if location_config.get('numeric_owner') else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--dry-run',) if dry_run else ())
        + (('--progress',) if progress else ())
        + (('--stdout',) if extract_to_stdout else ())
        # A local repository without a colon needs an absolute path so Borg doesn't misparse it.
        + ('::'.join((repository if ':' in repository else os.path.abspath(repository), archive)),)
        + (tuple(paths) if paths else ())
    )

    # The progress output isn't compatible with captured and logged output, as progress messes with
    # the terminal directly.
    if progress:
        return execute_command(
            full_command, output_file=DO_NOT_CAPTURE, working_directory=destination_path
        )

    if extract_to_stdout:
        return execute_command(
            full_command,
            output_file=subprocess.PIPE,
            working_directory=destination_path,
            run_to_completion=False,
        )

    # Don't give Borg local path, so as to error on warnings, as Borg only gives a warning if the
    # restore paths don't exist in the archive!
    execute_command(full_command, working_directory=destination_path)