Example #1
File: backup.py Project: twindb/backup
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: TwinDBBackupConfig
    """
    backup_start = time.time()
    try:
        for directory in config.backup_dirs:
            LOG.debug('copying %s', directory)
            src = FileSource(directory, run_type)
            dst = config.destination()
            _backup_stream(config, src, dst)
            src.apply_retention_policy(dst, config, run_type)
    except (
            DestinationError,
            SourceError,
            SshClientException
    ) as err:
        raise OperationError(err)
    export_info(config, data=time.time() - backup_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.backup)
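
A note on the error handling above: low-level source, destination, and SSH errors are re-raised as a single OperationError so the calling layer only has to handle one exception type. A minimal, self-contained sketch of that pattern (the exception classes are redefined locally here as stand-ins for the tool's own):

class SourceError(Exception):
    """Stand-in for the tool's source-side error."""

class DestinationError(Exception):
    """Stand-in for the tool's destination-side error."""

class OperationError(Exception):
    """Single error type exposed to the caller."""

def copy_directory(directory):
    # Hypothetical worker that fails on the source side.
    raise SourceError('cannot read %s' % directory)

try:
    copy_directory('/etc')
except (SourceError, DestinationError) as err:
    raise OperationError(err)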
Example #2
def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or an inheriting class.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()
    keep_local_path = twindb_config.keep_local_path

    if keep_local_path and os.path.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(osp.join(keep_local_path, copy.key))
        stream = dst.get_stream(copy)
    else:
        dst = twindb_config.destination()
        stream = dst.get_stream(copy)

        # GPG modifier
        if twindb_config.gpg:
            gpg = Gpg(stream,
                      twindb_config.gpg.recipient,
                      twindb_config.gpg.keyring,
                      secret_keyring=twindb_config.gpg.secret_keyring)
            LOG.debug('Decrypting stream')
            stream = gpg.revert_stream()
        else:
            LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)

    export_info(twindb_config,
                data=time.time() - restore_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.restore)
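
The extraction step above is the same in every restore variant: the destination hands back a readable stream and tar unpacks it into dst_dir. A minimal stdlib-only sketch of that piping pattern, with an illustrative local archive standing in for the destination stream:

import subprocess

def extract_stream(stream, dst_dir):
    """Unpack a gzipped tar stream into dst_dir via `tar zvxf -`."""
    proc = subprocess.Popen(['tar', 'zvxf', '-'], stdin=stream, cwd=dst_dir)
    proc.communicate()
    if proc.returncode:
        raise RuntimeError('tar exited with code %d' % proc.returncode)

# Illustrative paths: the stream would normally come from dst.get_stream(copy).
with open('/tmp/backup.tar.gz', 'rb') as archive:
    extract_stream(archive, '/tmp/restore')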
Example #3
def restore_from_file(config, backup_copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup name.
    :type backup_copy: str
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()
    if os.path.exists(backup_copy):
        dst = Local(backup_copy)
        stream = dst.get_stream(backup_copy)
    else:
        dst = get_destination(config)
        stream = dst.get_stream(backup_copy)
        # GPG modifier
        try:
            gpg = Gpg(stream,
                      config.get('gpg', 'recipient'),
                      config.get('gpg', 'keyring'),
                      secret_keyring=config.get('gpg', 'secret_keyring'))
            LOG.debug('Decrypting stream')
            stream = gpg.revert_stream()
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', backup_copy, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', backup_copy, err)
            exit(1)

    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.restore)
Example #4
File: backup.py Project: soft-way/backup
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')

    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')), run_type,
        full_backup, dst)

    callbacks = []
    src_name = _backup_stream(config, src, dst, callbacks)
    status = prepare_status(dst, src, run_type, src_name, backup_start)
    status = src.apply_retention_policy(dst, config, run_type, status)
    backup_duration = \
        status[run_type][src_name]['backup_finished'] - \
        status[run_type][src_name]['backup_started']
    export_info(config,
                data=backup_duration,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.backup)
    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
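
Several of the ConfigParser-based examples read optional settings with a try/except fallback instead of failing, as with full_backup defaulting to 'daily' above. A small sketch of that pattern; the config path is illustrative and the import shim covers both Python 2 and 3 module names:

try:
    import ConfigParser as configparser  # Python 2
except ImportError:
    import configparser                  # Python 3

config = configparser.ConfigParser()
config.read('/etc/twindb/twindb-backup.cfg')  # illustrative path

try:
    full_backup = config.get('mysql', 'full_backup')
except (configparser.NoSectionError, configparser.NoOptionError):
    full_backup = 'daily'
print('full backup run type: %s' % full_backup)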
Example #5
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: ConfigParser.ConfigParser
    """
    backup_start = time.time()
    for directory in get_directories_to_backup(config):
        LOG.debug('copying %s', directory)
        src = FileSource(directory, run_type)
        dst = get_destination(config)
        _backup_stream(config, src, dst)
        src.apply_retention_policy(dst, config, run_type)
    export_info(config,
                data=time.time() - backup_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.backup)
Example #6
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: TwinDBBackupConfig
    """
    backup_start = time.time()
    try:
        for directory in config.backup_dirs:
            LOG.debug('copying %s', directory)
            src = FileSource(directory, run_type)
            dst = config.destination()
            _backup_stream(config, src, dst)
            src.apply_retention_policy(dst, config, run_type)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    export_info(config,
                data=time.time() - backup_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.backup)
Example #7
File: restore.py Project: twindb/backup
def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or an inheriting class.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()
    keep_local_path = twindb_config.keep_local_path

    if keep_local_path and os.path.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(osp.join(keep_local_path, copy.key))
        stream = dst.get_stream(copy)
    else:
        dst = twindb_config.destination()
        stream = dst.get_stream(copy)

        # GPG modifier
        if twindb_config.gpg:
            gpg = Gpg(
                stream,
                twindb_config.gpg.recipient,
                twindb_config.gpg.keyring,
                secret_keyring=twindb_config.gpg.secret_keyring
            )
            LOG.debug('Decrypting stream')
            stream = gpg.revert_stream()
        else:
            LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)

    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
Example #8
File: restore.py Project: twindb/backup
def restore_from_mysql(twindb_config, copy, dst_dir,
                       tmp_dir=None,
                       cache=None,
                       hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str

    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    restore_start = time.time()
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError(
                    'Failed to get hostname from %s'
                    % copy
                )
        dst = twindb_config.destination(backup_source=hostname)

    key = copy.key
    status = MySQLStatus(dst=dst)

    stream = dst.get_stream(copy)

    if status[key].type == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    stream,
                    dst_dir,
                    twindb_config,
                    redo_only=False
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                stream,
                dst_dir,
                twindb_config,
                redo_only=False)

    else:
        full_copy = status.candidate_parent(
            copy.run_type
        )
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    full_stream,
                    dst_dir,
                    twindb_config,
                    redo_only=True
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                full_stream,
                dst_dir,
                twindb_config,
                redo_only=True
            )

        restore_from_mysql_incremental(
            stream,
            dst_dir,
            twindb_config,
            tmp_dir
        )

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(
            config_dir,
            os.path.dirname(path).lstrip('/')
        )
        mkdir_p(config_sub_dir, mode=0o755)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(twindb_config, data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
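
The my.cnf handling near the end of the restore mirrors each original config path under <dst_dir>/_config with its leading slash stripped, so the operator can compare or copy the files back later. A small sketch of the path construction only (paths are illustrative):

import os

def config_destination(dst_dir, original_path):
    """Where a restored my.cnf lands inside the _config directory."""
    config_dir = os.path.join(dst_dir, '_config')
    return os.path.join(config_dir,
                        os.path.dirname(original_path).lstrip('/'),
                        os.path.basename(original_path))

print(config_destination('/var/backup/restore', '/etc/my.cnf'))
# /var/backup/restore/_config/etc/my.cnf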
Example #9
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug("Not backing up MySQL")
        return

    dst = config.destination()

    try:
        full_backup = config.mysql.full_backup
    except configparser.NoOptionError:
        full_backup = "daily"
    backup_start = time.time()

    status = MySQLStatus(dst=dst)

    kwargs = {
        "backup_type": status.next_backup_type(full_backup, run_type),
        "dst": dst,
        "xtrabackup_binary": config.mysql.xtrabackup_binary,
    }
    parent = status.candidate_parent(run_type)

    if kwargs["backup_type"] == "incremental":
        kwargs["parent_lsn"] = parent.lsn

    LOG.debug("Creating source %r", kwargs)
    src = MySQLSource(MySQLConnectInfo(config.mysql.defaults_file), run_type,
                      **kwargs)

    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    LOG.debug("Backup copy name: %s", src.get_name())

    kwargs = {
        "type": src.type,
        "binlog": src.binlog_coordinate[0],
        "position": src.binlog_coordinate[1],
        "lsn": src.lsn,
        "backup_started": backup_start,
        "backup_finished": time.time(),
        "config_files": my_cnfs(MY_CNF_COMMON_PATHS),
    }
    if src.incremental:
        kwargs["parent"] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **kwargs)
    status.add(backup_copy)

    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug("status after apply_retention_policy():\n%s", status)

    backup_duration = backup_copy.duration
    export_info(
        config,
        data=backup_duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup,
    )

    status.save(dst)

    LOG.debug("Callbacks are %r", callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
Example #10
def restore_from_mysql(config,
                       copy,
                       dst_dir,
                       tmp_dir=None,
                       cache=None,
                       hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str

    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    restore_start = time.time()

    try:
        xtrabackup_binary = config.get('mysql', 'xtrabackup_binary')
    except ConfigParser.NoOptionError:
        xtrabackup_binary = XTRABACKUP_BINARY
    try:
        xbstream_binary = config.get('mysql', 'xbstream_binary')
    except ConfigParser.NoOptionError:
        xbstream_binary = XBSTREAM_BINARY

    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if osp.exists(osp.join(keep_local_path, copy.key)):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError('Failed to get hostname from %s' % copy)
        dst = get_destination(config, hostname=hostname)

    key = copy.key
    status = dst.status()

    stream = dst.get_stream(copy)

    if status[key].type == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream,
                                        dst_dir,
                                        config,
                                        redo_only=False,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream,
                                    dst_dir,
                                    config,
                                    redo_only=False,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)

    else:
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream,
                                        dst_dir,
                                        config,
                                        redo_only=True,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream,
                                    dst_dir,
                                    config,
                                    redo_only=True,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)

        restore_from_mysql_incremental(stream,
                                       dst_dir,
                                       config,
                                       tmp_dir,
                                       xtrabackup_binary=xtrabackup_binary,
                                       xbstream_binary=xbstream_binary)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        mkdir_p(config_sub_dir, mode=0o755)

        with open(os.path.join(config_sub_dir, os.path.basename(path)),
                  'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info(
        'Now copy content of %s to MySQL datadir: '
        'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info(
        'Make sure innodb_log_file_size and innodb_log_files_in_group '
        'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
Example #11
def restore_from_mysql(config, backup_copy, dst_dir, tmp_dir=None, cache=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup copy name.
    :type backup_copy: str
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    restore_start = time.time()
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if os.path.exists(backup_copy) and \
                backup_copy.startswith(keep_local_path):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass

    if not dst:
        hostname = get_hostname_from_backup_copy(backup_copy)
        if not hostname:
            raise DestinationError('Failed to get hostname from %s'
                                   % backup_copy)
        dst = get_destination(config, hostname=hostname)

    key = dst.basename(backup_copy)
    status = dst.status()

    stream = dst.get_stream(backup_copy)

    if get_backup_type(status, key) == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, config, tmp_dir)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, config, tmp_dir)

    else:
        full_copy = dst.get_full_copy_name(backup_copy)

        full_stream = dst.get_stream(full_copy)

        cache_key = os.path.basename(full_copy)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir,
                                        config, redo_only=True)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir,
                                    config, redo_only=True)

        restore_from_mysql_incremental(stream, dst_dir, config, tmp_dir)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        os.makedirs(config_sub_dir)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(config, data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', backup_copy, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s/* /var/lib/mysql/', dst_dir, dst_dir)
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if os.path.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
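
The cache handling repeated across the MySQL restores follows one convention: the Cache behaves like a container of copy basenames; a hit restores from the local cache with restore_in(), a miss restores from the destination and then populates the cache with cache.add(dst_dir, cache_key). A hedged sketch with a dictionary-backed stand-in for the tool's Cache object:

class DictCache(object):
    """Hypothetical stand-in for the tool's Cache object."""

    def __init__(self):
        self._items = {}

    def __contains__(self, key):
        return key in self._items

    def restore_in(self, key, dst_dir):
        print('restoring %s from local cache into %s' % (key, dst_dir))

    def add(self, dst_dir, key):
        self._items[key] = dst_dir


cache = DictCache()
cache_key = 'mysql-2016-11-23_21_50_54.xbstream.gz'  # illustrative basename
if cache_key in cache:
    cache.restore_in(cache_key, '/var/lib/mysql-restore')
else:
    # ...restore the full copy from the destination, then cache it:
    cache.add('/var/lib/mysql-restore', cache_key)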
Example #12
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug('Not backing up MySQL')
        return

    dst = config.destination()

    try:
        full_backup = config.mysql.full_backup
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()

    status = MySQLStatus(dst=dst)

    kwargs = {
        'backup_type': status.next_backup_type(full_backup, run_type),
        'dst': dst,
        'xtrabackup_binary': config.mysql.xtrabackup_binary
    }
    parent = status.candidate_parent(run_type)

    if kwargs['backup_type'] == 'incremental':
        kwargs['parent_lsn'] = parent.lsn

    LOG.debug('Creating source %r', kwargs)
    src = MySQLSource(MySQLConnectInfo(config.mysql.defaults_file), run_type,
                      **kwargs)

    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    LOG.debug('Backup copy name: %s', src.get_name())

    kwargs = {
        'type': src.type,
        'binlog': src.binlog_coordinate[0],
        'position': src.binlog_coordinate[1],
        'lsn': src.lsn,
        'backup_started': backup_start,
        'backup_finished': time.time(),
        'config_files': my_cnfs(MY_CNF_COMMON_PATHS)
    }
    if src.incremental:
        kwargs['parent'] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **kwargs)
    status.add(backup_copy)

    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug('status after apply_retention_policy():\n%s', status)

    backup_duration = backup_copy.duration
    export_info(config,
                data=backup_duration,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.backup)

    status.save(dst)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
Example #13
def restore_from_mysql(twindb_config,
                       copy,
                       dst_dir,
                       tmp_dir=None,
                       cache=None,
                       hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str

    """
    LOG.info("Restoring %s in %s", copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    restore_start = time.time()
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError("Failed to get hostname from %s" % copy)
        dst = twindb_config.destination(backup_source=hostname)

    key = copy.key
    status = MySQLStatus(dst=dst, status_directory=hostname)

    stream = dst.get_stream(copy)

    if status[key].type == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream,
                                        dst_dir,
                                        twindb_config,
                                        redo_only=False)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream,
                                    dst_dir,
                                    twindb_config,
                                    redo_only=False)

    else:
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream,
                                        dst_dir,
                                        twindb_config,
                                        redo_only=True)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream,
                                    dst_dir,
                                    twindb_config,
                                    redo_only=True)

        restore_from_mysql_incremental(stream, dst_dir, twindb_config, tmp_dir)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip("/"))
        mkdir_p(config_sub_dir, mode=0o755)

        with open(os.path.join(config_sub_dir, os.path.basename(path)),
                  "w") as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.restore,
    )
    LOG.info("Successfully restored %s in %s.", copy.key, dst_dir)
    LOG.info(
        "Now copy content of %s to MySQL datadir: "
        "cp -R %s /var/lib/mysql/",
        dst_dir,
        osp.join(dst_dir, "*"),
    )
    LOG.info("Fix permissions: chown -R mysql:mysql /var/lib/mysql/")
    LOG.info(
        "Make sure innodb_log_file_size and innodb_log_files_in_group "
        "in %s/backup-my.cnf and in /etc/my.cnf are same.",
        dst_dir,
    )

    if osp.exists(config_dir):
        LOG.info("Original my.cnf is restored in %s.", config_dir)

    LOG.info("Then you can start MySQL normally.")
Example #14
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')

    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    try:
        xtrabackup_binary = config.get('mysql', 'xtrabackup_binary')
    except ConfigParser.NoOptionError:
        xtrabackup_binary = XTRABACKUP_BINARY

    status = dst.status()

    kwargs = {
        'backup_type': status.next_backup_type(full_backup, run_type),
        'dst': dst,
        'xtrabackup_binary': xtrabackup_binary
    }
    parent = status.eligble_parent(run_type)

    if kwargs['backup_type'] == 'incremental':
        kwargs['parent_lsn'] = parent.lsn

    LOG.debug('Creating source %r', kwargs)
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')), run_type,
        **kwargs)

    callbacks = []
    _backup_stream(config, src, dst, callbacks=callbacks)
    LOG.debug('Backup copy name: %s', src.get_name())

    kwargs = {
        'type': src.type,
        'binlog': src.binlog_coordinate[0],
        'position': src.binlog_coordinate[1],
        'lsn': src.lsn,
        'backup_started': backup_start,
        'backup_finished': time.time(),
        'config_files': my_cnfs(MY_CNF_COMMON_PATHS)
    }
    if src.incremental:
        kwargs['parent'] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **kwargs)
    status.add(backup_copy)

    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug('status after apply_retention_policy():\n%s', status)

    backup_duration = status.backup_duration(run_type, src.get_name())
    export_info(config,
                data=backup_duration,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.backup)
    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
Example #15
File: backup.py Project: twindb/backup
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug('Not backing up MySQL')
        return

    dst = config.destination()

    try:
        full_backup = config.mysql.full_backup
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()

    status = MySQLStatus(dst=dst)

    kwargs = {
        'backup_type': status.next_backup_type(full_backup, run_type),
        'dst': dst,
        'xtrabackup_binary': config.mysql.xtrabackup_binary
    }
    parent = status.candidate_parent(run_type)

    if kwargs['backup_type'] == 'incremental':
        kwargs['parent_lsn'] = parent.lsn

    LOG.debug('Creating source %r', kwargs)
    src = MySQLSource(
        MySQLConnectInfo(config.mysql.defaults_file),
        run_type,
        **kwargs
    )

    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    LOG.debug('Backup copy name: %s', src.get_name())

    kwargs = {
        'type': src.type,
        'binlog': src.binlog_coordinate[0],
        'position': src.binlog_coordinate[1],
        'lsn': src.lsn,
        'backup_started': backup_start,
        'backup_finished': time.time(),
        'config_files': my_cnfs(MY_CNF_COMMON_PATHS)
    }
    if src.incremental:
        kwargs['parent'] = parent.key

    backup_copy = MySQLCopy(
        src.host,
        run_type,
        src.basename,
        **kwargs
    )
    status.add(backup_copy)

    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug('status after apply_retention_policy():\n%s', status)

    backup_duration = backup_copy.duration
    export_info(
        config,
        data=backup_duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup
    )

    status.save(dst)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
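
Every backup_mysql variant ends by draining a callbacks list collected by _backup_stream(): each entry is a (handler, kwargs) pair whose callback() method runs only after the status has been saved to the destination. A hedged sketch of that convention with a hypothetical handler:

class PrintCallback(object):
    """Hypothetical post-backup handler."""

    def callback(self, **kwargs):
        print('post-backup hook fired with %r' % kwargs)


callbacks = []
# _backup_stream() would append entries like this while uploading the copy:
callbacks.append((PrintCallback(), {'copy_name': 'daily/mysql.xbstream.gz'}))

for callback in callbacks:
    callback[0].callback(**callback[1])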