Example 1
def _backup_stream(config, src, dst, callbacks=None):
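    """Pipe the source stream through the configured modifiers and save it.

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param src: Backup source that provides get_stream() and get_name().
    :param dst: Backup destination that provides save().
    :param callbacks: Optional list collecting (modifier, kwargs) pairs to be
        invoked after the copy has been saved.
    """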
    stream = src.get_stream()
    # Gzip modifier
    stream = Gzip(stream).get_stream()
    src.suffix += '.gz'
    # KeepLocal modifier
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        kl_modifier = KeepLocal(stream,
                                os.path.join(keep_local_path, src.get_name()))
        stream = kl_modifier.get_stream()
        if callbacks is not None:
            callbacks.append((kl_modifier, {
                'keep_local_path': keep_local_path,
                'dst': dst
            }))
    except ConfigParser.NoOptionError:
        LOG.debug('keep_local_path is not present in the config file')
    # GPG modifier
    try:
        stream = Gpg(stream, config.get('gpg', 'recipient'),
                     config.get('gpg', 'keyring')).get_stream()
        src.suffix += '.gpg'
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        pass
    except ModifierException as err:
        LOG.warning(err)
        LOG.warning('Will skip encryption')
    if not dst.save(stream, src.get_name()):
        LOG.error('Failed to save backup copy %s', src.get_name())
        exit(1)
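
For reference, the options _backup_stream reads can be set up as follows, using the same Python 2 style ConfigParser seen in these examples. The option names match the code above; the paths and recipient are made-up examples. With no [gpg] section, the encryption step is simply skipped.

import ConfigParser

config = ConfigParser.ConfigParser()
config.add_section('destination')
config.set('destination', 'keep_local_path', '/var/backup/local')  # made-up path
config.add_section('gpg')
config.set('gpg', 'recipient', 'backups@example.com')      # made-up recipient
config.set('gpg', 'keyring', '/root/.gnupg/pubring.gpg')   # made-up keyring path
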
Example 2
def restore_from_mysql_full(stream, dst_dir, config,
                            redo_only=False):
    """
    Restore MySQL datadir from a backup copy

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param redo_only: If True, the apply-log step is run with --redo-only,
        i.e. the redo log is applied but no rollback is performed. Use False
        when restoring from a full copy alone; use True when preparing the
        base full copy that an incremental copy will later be applied to.
    :type redo_only: bool
    :return: True on success, False otherwise.
    :rtype: bool
    """
    # GPG modifier
    try:
        gpg = Gpg(stream,
                  config.get('gpg', 'recipient'),
                  config.get('gpg', 'keyring'),
                  secret_keyring=config.get('gpg', 'secret_keyring'))
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    stream = Gzip(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, dst_dir):
            return False

    mem_usage = psutil.virtual_memory()
    try:
        xtrabackup_cmd = ['innobackupex',
                          '--use-memory=%d' % (mem_usage.available/2),
                          '--apply-log']
        if redo_only:
            xtrabackup_cmd += ['--redo-only']
        xtrabackup_cmd += [dst_dir]

        LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
        xtrabackup_proc = Popen(xtrabackup_cmd,
                                stdout=None,
                                stderr=None)
        xtrabackup_proc.communicate()
        ret = xtrabackup_proc.returncode
        if ret:
            LOG.error('%s exited with code %d', " ".join(xtrabackup_cmd), ret)
        return ret == 0
    except OSError as err:
        LOG.error('Failed to prepare backup in %s: %s', dst_dir, err)
        return False
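
The helper _extract_xbstream is not shown in these examples. A rough sketch of what such a helper could look like is below; the name and structure are assumptions, while the xbstream flags (-x to extract, -C to change into the target directory) are Percona's standard ones. It assumes the stream behaves like a file object with a real file descriptor.

import logging
from subprocess import Popen, PIPE

LOG = logging.getLogger(__name__)


def _extract_xbstream_sketch(input_stream, working_dir):
    """Feed a backup stream to `xbstream -x` run inside working_dir."""
    try:
        proc = Popen(['xbstream', '-x', '-C', working_dir],
                     stdin=input_stream,
                     stdout=PIPE,
                     stderr=PIPE)
        cout, cerr = proc.communicate()
        if proc.returncode:
            LOG.error('xbstream exited with code %d', proc.returncode)
            LOG.error('stdout: %s', cout)
            LOG.error('stderr: %s', cerr)
            return False
        return True
    except OSError as err:
        LOG.error('Failed to extract xbstream: %s', err)
        return False
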
Example 3
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: ConfigParser.ConfigParser
    """
    for directory in get_directories_to_backup(config):
        LOG.debug('copying %s', directory)
        src = FileSource(directory, run_type)
        dst = get_destination(config)

        stream = src.get_stream()

        # Gzip modifier
        stream = Gzip(stream).get_stream()
        src.suffix += '.gz'

        # KeepLocal modifier
        try:
            keep_local_path = config.get('destination', 'keep_local_path')
            # src.get_name() includes the suffixes appended so far (e.g. 'tar.gz')
            dst_name = src.get_name()
            kl_modifier = KeepLocal(stream,
                                    os.path.join(keep_local_path, dst_name))
            stream = kl_modifier.get_stream()
        except ConfigParser.NoOptionError:
            pass

        # GPG modifier
        try:
            keyring = config.get('gpg', 'keyring')
            recipient = config.get('gpg', 'recipient')
            gpg = Gpg(stream, recipient, keyring)
            stream = gpg.get_stream()
            src.suffix += '.gpg'
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            pass
        except ModifierException as err:
            LOG.warning(err)
            LOG.warning('Will skip encryption')

        dst.save(stream, src.get_name())

        src.apply_retention_policy(dst, config, run_type)
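
The suffix handling above determines the name the copy is saved under: every modifier that transforms the stream also appends its extension before get_name() is called. A tiny illustration with a hypothetical stand-in for FileSource (the real class builds names differently):

class FakeFileSource(object):
    """Hypothetical stand-in used only to show the suffix chaining."""

    def __init__(self, path, run_type):
        self.path = path
        self.run_type = run_type
        self.suffix = 'tar'

    def get_name(self):
        return '%s/%s.%s' % (self.run_type,
                             self.path.lstrip('/').replace('/', '_'),
                             self.suffix)


src = FakeFileSource('/etc/my.cnf.d', 'daily')
src.suffix += '.gz'    # Gzip modifier applied
src.suffix += '.gpg'   # GPG modifier applied
print(src.get_name())  # daily/etc_my.cnf.d.tar.gz.gpg
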
Example 4
def restore_from_mysql_incremental(stream,
                                   dst_dir,
                                   config,
                                   tmp_dir=None,
                                   xtrabackup_binary=XTRABACKUP_BINARY,
                                   xbstream_binary=XBSTREAM_BINARY):
    """
    Restore MySQL datadir from an incremental copy.

    :param stream: Generator that provides backup copy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param tmp_dir: Path to temp dir
    :type tmp_dir: str
    :param xtrabackup_binary: Path to xtrabackup binary.
    :type xtrabackup_binary: str
    :param xbstream_binary: Path to xbstream binary.
    :type xbstream_binary: str
    :return: True on success, False otherwise.
    :rtype: bool
    """
    if tmp_dir is None:
        try:
            inc_dir = tempfile.mkdtemp()
        except (IOError, OSError):
            # Could not create a temporary directory: clean up the
            # destination directory before propagating the error.
            empty_dir(dst_dir)
            raise
    else:
        inc_dir = tmp_dir
    # GPG modifier
    try:
        gpg = Gpg(stream,
                  config.get('gpg', 'recipient'),
                  config.get('gpg', 'keyring'),
                  secret_keyring=config.get('gpg', 'secret_keyring'))
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    stream = Gzip(stream).revert_stream()

    with stream as handler:
        if not _extract_xbstream(handler, inc_dir, xbstream_binary):
            return False

    try:
        mem_usage = psutil.virtual_memory()
        try:
            xtrabackup_cmd = [
                xtrabackup_binary,
                '--use-memory=%d' % (mem_usage.available / 2), '--prepare',
                '--apply-log-only',
                '--target-dir=%s' % dst_dir
            ]
            LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
            xtrabackup_proc = Popen(xtrabackup_cmd, stdout=None, stderr=None)
            xtrabackup_proc.communicate()
            ret = xtrabackup_proc.returncode
            if ret:
                LOG.error('%s exited with code %d', " ".join(xtrabackup_cmd),
                          ret)
                return False

            xtrabackup_cmd = [
                xtrabackup_binary,
                '--use-memory=%d' % (mem_usage.available / 2), '--prepare',
                "--target-dir=%s" % dst_dir,
                "--incremental-dir=%s" % inc_dir
            ]
            LOG.debug('Running %s', ' '.join(xtrabackup_cmd))
            xtrabackup_proc = Popen(xtrabackup_cmd, stdout=None, stderr=None)
            xtrabackup_proc.communicate()
            ret = xtrabackup_proc.returncode
            if ret:
                LOG.error('%s exited with code %d', " ".join(xtrabackup_cmd),
                          ret)
            return ret == 0
        except OSError as err:
            LOG.error('Failed to prepare backup in %s: %s', dst_dir, err)
            return False
    finally:
        try:
            if tmp_dir is None:
                # Remove the temporary incremental dir created above
                shutil.rmtree(inc_dir)
        except OSError as exc:
            if exc.errno != errno.ENOENT:  # ENOENT - no such file or directory
                raise  # re-raise exception
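
Putting the two restore functions together, the order implied by the redo_only parameter is: prepare the base full copy with redo_only=True, then apply the incremental copy on top of it. A minimal sketch, assuming full_stream and inc_stream are the corresponding backup streams:

def restore_incremental_chain(full_stream, inc_stream, dst_dir, config):
    """Restore a full copy plus one incremental copy into dst_dir."""
    # Base full copy: apply the redo log only, no rollback yet.
    if not restore_from_mysql_full(full_stream, dst_dir, config,
                                   redo_only=True):
        return False
    # Incremental copy: the final prepare happens here.
    return restore_from_mysql_incremental(inc_stream, dst_dir, config)
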
Example 5
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')

    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return

    dst = get_destination(config)

    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')), run_type,
        full_backup, dst)

    callbacks = []
    stream = src.get_stream()
    src_name = src.get_name()

    # Gzip modifier
    stream = Gzip(stream).get_stream()
    src_name += '.gz'

    # KeepLocal modifier
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        kl_modifier = KeepLocal(stream, os.path.join(keep_local_path,
                                                     src_name))
        stream = kl_modifier.get_stream()

        callbacks.append((kl_modifier, {
            'keep_local_path': keep_local_path,
            'dst': dst
        }))

    except ConfigParser.NoOptionError:
        LOG.debug('keep_local_path is not present in the config file')

    # GPG modifier
    try:
        stream = Gpg(stream, config.get('gpg', 'recipient'),
                     config.get('gpg', 'keyring')).get_stream()
        src_name += '.gpg'
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        pass
    except ModifierException as err:
        LOG.warning(err)
        LOG.warning('Will skip encryption')

    if not dst.save(stream, src_name):
        LOG.error('Failed to save backup copy %s', src_name)
        exit(1)
    status = prepare_status(dst, src, run_type, src_name, backup_start)

    src.apply_retention_policy(dst, config, run_type, status)

    dst.status(status)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
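
The callbacks list above implements a simple deferred-hook pattern: each modifier that needs post-upload work registers itself together with keyword arguments, and the hooks run only after the copy has been saved. A minimal illustration with a hypothetical modifier and made-up values:

class DummyModifier(object):
    """Hypothetical modifier exposing the same callback(**kwargs) hook."""

    def callback(self, **kwargs):
        print('post-upload hook called with %r' % (kwargs,))


callbacks = []
callbacks.append((DummyModifier(), {
    'keep_local_path': '/var/backup/local',  # made-up path
    'dst': None
}))

# ... after a successful dst.save() ...
for callback in callbacks:
    callback[0].callback(**callback[1])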