def test__get_destination_ssh_invalid_port(config_content):
    s_config = config_content.format(destination="ssh", port='foo')
    buf = StringIO.StringIO(s_config)
    config = ConfigParser.ConfigParser()
    config.readfp(buf)
    with pytest.raises(ValueError):
        get_destination(config)
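
These destination tests rely on a config_content pytest fixture that is not
shown in this listing. A minimal stand-in, with the INI layout assumed purely
for illustration:

# Hypothetical config_content fixture -- the real one lives in the project's
# conftest.py; section and option names here are assumptions.
import pytest


@pytest.fixture
def config_content():
    return """
[source]
backup_dirs=/etc /root /home

[destination]
backup_destination={destination}

[ssh]
backup_host=127.0.0.1
backup_dir=/tmp/backup
ssh_user=root
port={port}
"""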
Example #2
def restore_mysql(cfg, dst, backup_copy, cache):
    """Restore from mysql backup"""
    LOG.debug('mysql: %r', cfg)

    if not backup_copy:
        LOG.info('No backup copy specified. Choose one from below:')
        list_available_backups(cfg)
        exit(1)

    try:
        ensure_empty(dst)
        dst_storage = get_destination(
            cfg,
            get_hostname_from_backup_copy(backup_copy)
        )
        key = dst_storage.basename(backup_copy)
        copy = dst_storage.status()[key]
        if cache:
            restore_from_mysql(cfg, copy, dst, cache=Cache(cache))
        else:
            restore_from_mysql(cfg, copy, dst)

    except (TwinDBBackupError, CacheException) as err:
        LOG.error(err)
        exit(1)
    except (OSError, IOError) as err:
        LOG.error(err)
        exit(1)
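
A minimal driver sketch for restore_mysql; the config path and the backup copy
URL below are assumptions, not values taken from this listing:

# Illustrative invocation only.
import ConfigParser

cfg = ConfigParser.ConfigParser()
cfg.read('/etc/twindb/twindb-backup.cfg')  # assumed config location
restore_mysql(
    cfg,
    '/var/tmp/restore',  # destination directory, must be empty
    's3://backups/master1/daily/mysql/mysql-full.xbstream.gz',  # assumed name
    None,  # no local cache
)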
Example #3
def verify_mysql_backup(config, dst_path, backup_copy, hostname=None):
    """Restore mysql backup and measure time"""
    if backup_copy == "latest":
        dst = get_destination(config, hostname)
        url = dst.get_latest_backup()
    else:
        url = backup_copy
    if url is None:
        return json.dumps({
            'backup_copy': url,
            'restore_time': 0,
            'success': False
        }, indent=4, sort_keys=True)
    start_restore_time = time.time()
    success = True
    try:
        tmp_dir = tempfile.mkdtemp()
        restore_from_mysql(config, url, dst_path, tmp_dir)
        edit_backup_my_cnf(dst_path)
        shutil.rmtree(tmp_dir)
    except (TwinDBBackupError, OSError, IOError) as err:
        LOG.error(err)
        success = False
    end_restore_time = time.time()
    restore_time = end_restore_time - start_restore_time
    return json.dumps({
        'backup_copy': url,
        'restore_time': restore_time,
        'success': success
    }, indent=4, sort_keys=True)
Example #4
def status(cfg, copy_type, hostname):
    """Print backups status"""
    dst = get_destination(cfg, hostname)
    print(
        dst.status(
            cls=MEDIA_STATUS_MAP[copy_type]
        )
    )
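
MEDIA_STATUS_MAP is not defined in this listing; it presumably maps a copy
type name to its status class. A sketch under that assumption (the keys and
import paths are guesses, not confirmed by this listing):

# Assumed shape of MEDIA_STATUS_MAP -- illustrative only.
from twindb_backup.status.binlog_status import BinlogStatus
from twindb_backup.status.mysql_status import MySQLStatus

MEDIA_STATUS_MAP = {
    'mysql': MySQLStatus,
    'binlog': BinlogStatus,
}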
Example #5
def test__get_destination_ssh_valid_port(config_content):
    s_config = config_content.format(destination="ssh", port=4321)
    buf = StringIO.StringIO(s_config)
    config = ConfigParser.ConfigParser()
    config.readfp(buf)
    dst = get_destination(config)
    assert isinstance(dst, Ssh)
    assert dst.client.port == 4321
Example #6
def test__get_destination_ssh_valid_port_as_str(config_content):
    s_config = config_content.format(destination="ssh", port='1234')
    buf = StringIO.StringIO(s_config)
    config = ConfigParser.ConfigParser()
    config.readfp(buf)
    dst = get_destination(config)
    assert isinstance(dst, Ssh)
    assert dst._ssh_client.ssh_connect_info.port == 1234
Example #7
def restore_from_file(config, backup_copy, dst_dir):
    """
    Restore files from a backup copy into the given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup name.
    :type backup_copy: str
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()
    if os.path.exists(backup_copy):
        dst = Local(backup_copy)
        stream = dst.get_stream(backup_copy)
    else:
        dst = get_destination(config)
        stream = dst.get_stream(backup_copy)
        # GPG modifier
        try:
            gpg = Gpg(stream,
                      config.get('gpg', 'recipient'),
                      config.get('gpg', 'keyring'),
                      secret_keyring=config.get('gpg', 'secret_keyring'))
            LOG.debug('Decrypting stream')
            stream = gpg.revert_stream()
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', backup_copy, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', backup_copy, err)
            exit(1)

    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.restore)
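
dst.get_stream() is used above as a context manager that yields a file-like
object suitable for Popen(stdin=...). A self-contained stand-in with the same
contract, for illustration only:

# Minimal stand-in for a destination's get_stream(): stream a local file
# through a child process and yield its stdout as the file-like handler.
import subprocess
from contextlib import contextmanager


@contextmanager
def get_stream(path):
    proc = subprocess.Popen(['cat', path], stdout=subprocess.PIPE)
    try:
        yield proc.stdout
    finally:
        proc.stdout.close()
        proc.wait()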
Example #8
def backup_binlogs(run_type, config):  # pylint: disable=too-many-locals
    """Copy MySQL binlog files to the backup destination.

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    """
    dst = get_destination(config)
    status = dst.status(cls=BinlogStatus)
    try:
        mysql_client = MySQLClient(
            defaults_file=config.get('mysql', 'mysql_defaults_file'))
    except ConfigParser.NoSectionError:
        LOG.debug('No mysql section in the config. Not backing up binlogs')
        return

    last_copy = status.get_latest_backup()
    LOG.debug('Latest copied binlog %s', last_copy)
    with mysql_client.cursor() as cur:
        cur.execute("FLUSH BINARY LOGS")
        backup_set = binlogs_to_backup(
            cur, last_binlog=last_copy.name if last_copy else None)
        cur.execute("SELECT @@log_bin_basename")
        row = cur.fetchone()
        binlog_dir = osp.dirname(row['@@log_bin_basename'])

    for binlog_name in backup_set:
        src = BinlogSource(run_type, mysql_client, binlog_name)
        binlog_copy = BinlogCopy(
            src.host, binlog_name,
            BinlogParser(osp.join(binlog_dir, binlog_name)).created_at)
        _backup_stream(config, src, dst)
        status.add(binlog_copy)

    try:
        # must be an int: the value feeds the expiry arithmetic below
        expire_log_days = config.getint('mysql', 'expire_log_days')
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        expire_log_days = 7

    for copy in status:
        now = int(time.time())
        LOG.debug('Reviewing copy %s. Now: %d', copy, now)

        if copy.created_at < now - expire_log_days * 24 * 3600:
            LOG.debug('Deleting copy that was taken %d seconds ago',
                      now - copy.created_at)
            dst.delete(copy.key + ".gz")
            status.remove(copy.key)

    dst.status(status, cls=BinlogStatus)
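
The expiry loop keeps a copy only while it is younger than expire_log_days;
the cutoff arithmetic, spelled out:

# Worked example of the retention cutoff with the default of 7 days.
import time

expire_log_days = 7
now = int(time.time())
cutoff = now - expire_log_days * 24 * 3600  # 7 * 86400 = 604800 seconds
# Any copy with copy.created_at < cutoff is deleted and removed from status.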
Example #9
def share(config, s3_url):
    """
    Make a backup copy public and print its public URL.

    :param config: Tool configuration.
    :param s3_url: S3 URL of the backup copy.
    :type s3_url: str
    :raise TwinDBBackupError: if the destination does not support sharing.
    """
    dst = get_destination(config)
    try:
        print(dst.share(s3_url))
    except NotImplementedError as err:
        raise TwinDBBackupError(err)
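
An illustrative call; the bucket and key below are made up, and only
destinations that actually implement share() will succeed:

# Hypothetical example: prints a public URL, or raises TwinDBBackupError
# if the destination does not support sharing.
share(config, 's3://backup-bucket/master1/daily/mysql/mysql-full.xbstream.gz')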
Example #10
def verify_mysql_backup(config, dst_path, backup_copy, hostname=None):
    """Restore mysql backup and measure time"""
    dst = get_destination(config, hostname)
    status = dst.status()
    if backup_copy == "latest":
        copy = status.get_latest_backup()
    else:
        key = dst.basename(backup_copy)
        copy = status[key]

    if copy is None:
        return json.dumps(
            {
                'backup_copy': backup_copy,
                'restore_time': 0,
                'success': False
            },
            indent=4,
            sort_keys=True)
    start_restore_time = time.time()
    success = True
    tmp_dir = tempfile.mkdtemp()

    try:

        LOG.debug('Verifying backup copy in %s', tmp_dir)
        restore_from_mysql(config, copy, dst_path, tmp_dir)
        edit_backup_my_cnf(dst_path)

    except (TwinDBBackupError, OSError, IOError) as err:

        LOG.error(err)
        LOG.debug(traceback.format_exc())
        success = False

    finally:

        shutil.rmtree(tmp_dir, ignore_errors=True)

    end_restore_time = time.time()
    restore_time = end_restore_time - start_restore_time
    return json.dumps(
        {
            'backup_copy': copy.key,
            'restore_time': restore_time,
            'success': success
        },
        indent=4,
        sort_keys=True)
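
The returned document always carries the same three keys; a sketch of the
expected shape (values are examples, not real output):

result = verify_mysql_backup(config, '/var/tmp/restore', 'latest')
print(result)
# {
#     "backup_copy": "master1/daily/mysql/mysql-full.xbstream.gz",
#     "restore_time": 42.7,
#     "success": true
# }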
Example #11
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: ConfigParser.ConfigParser
    """
    for directory in get_directories_to_backup(config):
        LOG.debug('copying %s', directory)
        src = FileSource(directory, run_type)
        dst = get_destination(config)

        stream = src.get_stream()

        # Gzip modifier
        stream = Gzip(stream).get_stream()
        src.suffix += '.gz'

        # KeepLocal modifier
        try:
            keep_local_path = config.get('destination', 'keep_local_path')
            # src.suffix = 'tar.gz.aaa'
            dst_name = src.get_name()
            kl_modifier = KeepLocal(stream,
                                    os.path.join(keep_local_path, dst_name))
            stream = kl_modifier.get_stream()
        except ConfigParser.NoOptionError:
            pass

        # GPG modifier
        try:
            keyring = config.get('gpg', 'keyring')
            recipient = config.get('gpg', 'recipient')
            gpg = Gpg(stream, recipient, keyring)
            stream = gpg.get_stream()
            src.suffix += '.gpg'
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            pass
        except ModifierException as err:
            LOG.warning(err)
            LOG.warning('Will skip encryption')

        dst.save(stream, src.get_name())

        src.apply_retention_policy(dst, config, run_type)
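
Each modifier above consumes a file-like stream and exposes a new one, so
Gzip, KeepLocal and Gpg compose into a pipeline. A self-contained demo of the
same stream-wrapping idea using plain gzip:

# Compress an in-memory stream, then read it back, mirroring how each
# modifier wraps the stream produced by the previous one.
import gzip
import io

source = io.BytesIO(b'backup payload')

compressed = io.BytesIO()
with gzip.GzipFile(fileobj=compressed, mode='wb') as gz:
    gz.write(source.read())
compressed.seek(0)

with gzip.GzipFile(fileobj=compressed, mode='rb') as gz:
    assert gz.read() == b'backup payload'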
Example #12
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')

    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')), run_type,
        full_backup, dst)

    callbacks = []
    src_name = _backup_stream(config, src, dst, callbacks)
    status = prepare_status(dst, src, run_type, src_name, backup_start)
    status = src.apply_retention_policy(dst, config, run_type, status)
    backup_duration = \
        status[run_type][src_name]['backup_finished'] - \
        status[run_type][src_name]['backup_started']
    export_info(config,
                data=backup_duration,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.backup)
    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
Example #13
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: ConfigParser.ConfigParser
    """
    backup_start = time.time()
    for directory in get_directories_to_backup(config):
        LOG.debug('copying %s', directory)
        src = FileSource(directory, run_type)
        dst = get_destination(config)
        _backup_stream(config, src, dst)
        src.apply_retention_policy(dst, config, run_type)
    export_info(config,
                data=time.time() - backup_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.backup)
Example #14
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')

    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    try:
        xtrabackup_binary = config.get('mysql', 'xtrabackup_binary')
    except ConfigParser.NoOptionError:
        xtrabackup_binary = XTRABACKUP_BINARY

    status = dst.status()

    kwargs = {
        'backup_type': status.next_backup_type(full_backup, run_type),
        'dst': dst,
        'xtrabackup_binary': xtrabackup_binary
    }
    parent = status.eligble_parent(run_type)

    if kwargs['backup_type'] == 'incremental':
        kwargs['parent_lsn'] = parent.lsn

    LOG.debug('Creating source %r', kwargs)
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')), run_type,
        **kwargs)

    callbacks = []
    _backup_stream(config, src, dst, callbacks=callbacks)
    LOG.debug('Backup copy name: %s', src.get_name())

    kwargs = {
        'type': src.type,
        'binlog': src.binlog_coordinate[0],
        'position': src.binlog_coordinate[1],
        'lsn': src.lsn,
        'backup_started': backup_start,
        'backup_finished': time.time(),
        'config_files': my_cnfs(MY_CNF_COMMON_PATHS)
    }
    if src.incremental:
        kwargs['parent'] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **kwargs)
    status.add(backup_copy)

    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug('status after apply_retention_policy():\n%s', status)

    backup_duration = status.backup_duration(run_type, src.get_name())
    export_info(config,
                data=backup_duration,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.backup)
    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
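
Each entry in callbacks is a (modifier, kwargs) pair, so the final loop is
equivalent to this unpacked form:

# Unpacked equivalent of the callback loop above.
for modifier, kwargs in callbacks:
    modifier.callback(**kwargs)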
Example #15
def status(cfg):
    """Print backups status"""
    dst = get_destination(cfg)
    print(json.dumps(dst.status(), indent=4, sort_keys=True))
Example #16
def restore_from_mysql(config,
                       copy,
                       dst_dir,
                       tmp_dir=None,
                       cache=None,
                       hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str

    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    restore_start = time.time()

    try:
        xtrabackup_binary = config.get('mysql', 'xtrabackup_binary')
    except ConfigParser.NoOptionError:
        xtrabackup_binary = XTRABACKUP_BINARY
    try:
        xbstream_binary = config.get('mysql', 'xbstream_binary')
    except ConfigParser.NoOptionError:
        xbstream_binary = XBSTREAM_BINARY

    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if osp.exists(osp.join(keep_local_path, copy.key)):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError('Failed to get hostname from %s' % copy)
        dst = get_destination(config, hostname=hostname)

    key = copy.key
    status = dst.status()

    stream = dst.get_stream(copy)

    if status[key].type == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream,
                                        dst_dir,
                                        config,
                                        redo_only=False,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream,
                                    dst_dir,
                                    config,
                                    redo_only=False,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)

    else:
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream,
                                        dst_dir,
                                        config,
                                        redo_only=True,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream,
                                    dst_dir,
                                    config,
                                    redo_only=True,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)

        restore_from_mysql_incremental(stream,
                                       dst_dir,
                                       config,
                                       tmp_dir,
                                       xtrabackup_binary=xtrabackup_binary,
                                       xbstream_binary=xbstream_binary)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        mkdir_p(config_sub_dir, mode=0755)

        with open(os.path.join(config_sub_dir, os.path.basename(path)),
                  'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info(
        'Now copy content of %s to MySQL datadir: '
        'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info(
        'Make sure innodb_log_file_size and innodb_log_files_in_group '
        'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
Example #17
def restore_from_mysql(config, backup_copy, dst_dir, cache=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup copy name.
    :type backup_copy: str
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None

    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if os.path.exists(backup_copy) \
                and backup_copy.startswith(keep_local_path):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass

    if not dst:
        hostname = get_hostname_from_backup_copy(backup_copy)
        if not hostname:
            raise DestinationError('Failed to get hostname from %s'
                                   % backup_copy)
        dst = get_destination(config, hostname=hostname)

    key = dst.basename(backup_copy)
    status = dst.status()

    stream = dst.get_stream(backup_copy)

    if get_backup_type(status, key) == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, config)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, config)

    else:
        full_copy = dst.get_full_copy_name(backup_copy)

        full_stream = dst.get_stream(full_copy)

        cache_key = os.path.basename(full_copy)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir,
                                        config, redo_only=True)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir,
                                    config, redo_only=True)

        restore_from_mysql_incremental(stream, dst_dir, config)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        os.makedirs(config_sub_dir)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)

    LOG.info('Successfully restored %s in %s.', backup_copy, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s/* /var/lib/mysql/', dst_dir, dst_dir)
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if os.path.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
Example #18
def status(cfg):
    """Print backups status"""
    dst = get_destination(cfg)
    print(dst.status())
Example #19
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')

    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')), run_type,
        full_backup, dst)

    callbacks = []
    stream = src.get_stream()
    src_name = src.get_name()

    # Gzip modifier
    stream = Gzip(stream).get_stream()
    src_name += '.gz'

    # KeepLocal modifier
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        kl_modifier = KeepLocal(stream, os.path.join(keep_local_path,
                                                     src_name))
        stream = kl_modifier.get_stream()

        callbacks.append((kl_modifier, {
            'keep_local_path': keep_local_path,
            'dst': dst
        }))

    except ConfigParser.NoOptionError:
        LOG.debug('keep_local_path is not present in the config file')

    # GPG modifier
    try:
        stream = Gpg(stream, config.get('gpg', 'recipient'),
                     config.get('gpg', 'keyring')).get_stream()
        src_name += '.gpg'
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        pass
    except ModifierException as err:
        LOG.warning(err)
        LOG.warning('Will skip encryption')

    if not dst.save(stream, src_name):
        LOG.error('Failed to save backup copy %s', src_name)
        exit(1)
    status = prepare_status(dst, src, run_type, src_name, backup_start)

    src.apply_retention_policy(dst, config, run_type, status)

    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])