def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or an inheriting class.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()

    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and os.path.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(osp.join(keep_local_path, copy.key))
        stream = dst.get_stream(copy)
    else:
        dst = twindb_config.destination()
        stream = dst.get_stream(copy)

    # GPG modifier
    if twindb_config.gpg:
        gpg = Gpg(
            stream,
            twindb_config.gpg.recipient,
            twindb_config.gpg.keyring,
            secret_keyring=twindb_config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)

    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
def restore_from_file(config, backup_copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup name.
    :type backup_copy: str
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()

    if os.path.exists(backup_copy):
        dst = Local(backup_copy)
        stream = dst.get_stream(backup_copy)
    else:
        dst = get_destination(config)
        stream = dst.get_stream(backup_copy)

    # GPG modifier
    try:
        gpg = Gpg(stream,
                  config.get('gpg', 'recipient'),
                  config.get('gpg', 'keyring'),
                  secret_keyring=config.get('gpg', 'secret_keyring'))
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', backup_copy, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', backup_copy, err)
            exit(1)

    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.files,
                measure_type=ExportMeasureType.restore)
def list_available_backups(twindb_config, copy_type=None):
    """
    Print known backup copies on a destination specified in the configuration.

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy_type: Limit list to specific type of backups.
    :type copy_type: files|mysql
    """
    dsts = [twindb_config.destination()]
    if twindb_config.keep_local_path:
        dsts.insert(0, Local(twindb_config.keep_local_path))

    for dst in dsts:
        LOG.info('Destination %s', dst)
        for mtype in MEDIA_TYPES:
            if copy_type in [None, mtype]:
                func = "_print_%s" % mtype
                globals()[func](dst)
def list_available_backups(config, copy_type=None):
    """
    Print known backup copies on a destination specified in the configuration.

    :param config: tool configuration
    :type config: ConfigParser.ConfigParser
    :param copy_type: Limit list to specific type of backups.
    :type copy_type: files|mysql
    """
    dsts = [get_destination(config)]
    if config.has_option('destination', 'keep_local_path'):
        dsts.insert(0, Local(config.get('destination', 'keep_local_path')))

    for dst in dsts:
        LOG.info('Destination %s', dst)
        for mtype in MEDIA_TYPES:
            if copy_type in [None, mtype]:
                func = "_print_%s" % mtype
                globals()[func](dst)
def callback(self, **kwargs):
    # Mirror the status from the remote destination into the local one.
    local_dst = Local(kwargs['keep_local_path'])
    local_dst.status(kwargs['dst'].status())
def restore_from_mysql(twindb_config, copy, dst_dir,
                       tmp_dir=None, cache=None, hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str
    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    restore_start = time.time()

    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError(
                    'Failed to get hostname from %s' % copy
                )
        dst = twindb_config.destination(backup_source=hostname)

    key = copy.key
    status = MySQLStatus(dst=dst)

    stream = dst.get_stream(copy)

    if status[key].type == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    stream,
                    dst_dir,
                    twindb_config,
                    redo_only=False
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                stream,
                dst_dir,
                twindb_config,
                redo_only=False)
    else:
        full_copy = status.candidate_parent(
            copy.run_type
        )
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    full_stream,
                    dst_dir,
                    twindb_config,
                    redo_only=True
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                full_stream,
                dst_dir,
                twindb_config,
                redo_only=True
            )
        restore_from_mysql_incremental(
            stream,
            dst_dir,
            twindb_config,
            tmp_dir
        )

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(
            config_dir,
            os.path.dirname(path).lstrip('/')
        )
        mkdir_p(config_sub_dir, mode=0755)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(twindb_config,
                data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
def restore_from_mysql(config, copy, dst_dir,
                       tmp_dir=None, cache=None, hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str
    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    restore_start = time.time()

    try:
        xtrabackup_binary = config.get('mysql', 'xtrabackup_binary')
    except ConfigParser.NoOptionError:
        xtrabackup_binary = XTRABACKUP_BINARY

    try:
        xbstream_binary = config.get('mysql', 'xbstream_binary')
    except ConfigParser.NoOptionError:
        xbstream_binary = XBSTREAM_BINARY

    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if osp.exists(osp.join(keep_local_path, copy.key)):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError(
                    'Failed to get hostname from %s' % copy
                )
        dst = get_destination(config, hostname=hostname)

    key = copy.key
    status = dst.status()

    stream = dst.get_stream(copy)

    if status[key].type == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, config,
                                        redo_only=False,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, config,
                                    redo_only=False,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)
    else:
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir, config,
                                        redo_only=True,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir, config,
                                    redo_only=True,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)
        restore_from_mysql_incremental(stream, dst_dir, config, tmp_dir,
                                       xtrabackup_binary=xtrabackup_binary,
                                       xbstream_binary=xbstream_binary)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        mkdir_p(config_sub_dir, mode=0755)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(config,
                data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info(
        'Now copy content of %s to MySQL datadir: '
        'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info(
        'Make sure innodb_log_file_size and innodb_log_files_in_group '
        'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
def restore_from_mysql(config, backup_copy, dst_dir, cache=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup copy name.
    :type backup_copy: str
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if os.path.exists(backup_copy) \
                and backup_copy.startswith(keep_local_path):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass

    if not dst:
        hostname = get_hostname_from_backup_copy(backup_copy)
        if not hostname:
            raise DestinationError('Failed to get hostname from %s'
                                   % backup_copy)
        dst = get_destination(config, hostname=hostname)

    key = dst.basename(backup_copy)
    status = dst.status()

    stream = dst.get_stream(backup_copy)

    if get_backup_type(status, key) == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, config)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, config)
    else:
        full_copy = dst.get_full_copy_name(backup_copy)
        full_stream = dst.get_stream(full_copy)
        cache_key = os.path.basename(full_copy)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir, config,
                                        redo_only=True)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir, config,
                                    redo_only=True)
        restore_from_mysql_incremental(stream, dst_dir, config)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        os.makedirs(config_sub_dir)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    LOG.info('Successfully restored %s in %s.', backup_copy, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s/* /var/lib/mysql/', dst_dir, dst_dir)
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if os.path.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')
def restore_from_mysql(
    twindb_config, copy, dst_dir, tmp_dir=None, cache=None, hostname=None
):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str
    """
    LOG.info("Restoring %s in %s", copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    restore_start = time.time()

    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError("Failed to get hostname from %s" % copy)
        dst = twindb_config.destination(backup_source=hostname)

    key = copy.key
    status = MySQLStatus(dst=dst, status_directory=hostname)

    stream = dst.get_stream(copy)

    if status[key].type == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    stream, dst_dir, twindb_config, redo_only=False
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                stream, dst_dir, twindb_config, redo_only=False
            )
    else:
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    full_stream, dst_dir, twindb_config, redo_only=True
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                full_stream, dst_dir, twindb_config, redo_only=True
            )
        restore_from_mysql_incremental(stream, dst_dir, twindb_config, tmp_dir)

    config_dir = os.path.join(dst_dir, "_config")

    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(
            config_dir, os.path.dirname(path).lstrip("/")
        )
        mkdir_p(config_sub_dir, mode=0o755)

        with open(
            os.path.join(config_sub_dir, os.path.basename(path)), "w"
        ) as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.restore,
    )
    LOG.info("Successfully restored %s in %s.", copy.key, dst_dir)
    LOG.info(
        "Now copy content of %s to MySQL datadir: "
        "cp -R %s /var/lib/mysql/",
        dst_dir,
        osp.join(dst_dir, "*"),
    )
    LOG.info("Fix permissions: chown -R mysql:mysql /var/lib/mysql/")
    LOG.info(
        "Make sure innodb_log_file_size and innodb_log_files_in_group "
        "in %s/backup-my.cnf and in /etc/my.cnf are same.",
        dst_dir,
    )

    if osp.exists(config_dir):
        LOG.info("Original my.cnf is restored in %s.", config_dir)

    LOG.info("Then you can start MySQL normally.")
def callback(self, **kwargs): local_dst = Local(kwargs["keep_local_path"]) status = MySQLStatus(dst=kwargs["dst"]) status.save(local_dst)
def test_status_path_mysql(mock_socket, cls, filename, tmpdir):
    mock_socket.gethostname.return_value = 'foo'
    path = tmpdir.mkdir('backups')
    dst = Local(str(path))
    assert dst.status_path(cls=cls) == '%s/foo/%s' \
        % (str(path), filename)