def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or and inheriting classes.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    started_at = time.time()

    # Prefer a locally kept copy when one exists, otherwise read from
    # the configured remote destination.
    local_root = twindb_config.keep_local_path
    if local_root and os.path.exists(osp.join(local_root, copy.key)):
        dst = Local(osp.join(local_root, copy.key))
    else:
        dst = twindb_config.destination()
    stream = dst.get_stream(copy)

    # GPG modifier
    if twindb_config.gpg:
        gpg = Gpg(
            stream,
            twindb_config.gpg.recipient,
            twindb_config.gpg.keyring,
            secret_keyring=twindb_config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            std_out, std_err = proc.communicate()
            if proc.returncode:
                LOG.error('%s exited with code %d', cmd, proc.returncode)
                if std_out:
                    LOG.error('STDOUT: %s', std_out)
                if std_err:
                    LOG.error('STDERR: %s', std_err)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)

    export_info(
        twindb_config,
        data=time.time() - started_at,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
def restore_from_file(config, backup_copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup name.
    :type backup_copy: str
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)
    started_at = time.time()

    # A copy that exists on the local file system is read directly;
    # anything else comes from the configured destination.
    if os.path.exists(backup_copy):
        source = Local(backup_copy)
    else:
        source = get_destination(config)
    stream = source.get_stream(backup_copy)

    # GPG modifier: decrypt only when the [gpg] section is configured.
    try:
        gpg = Gpg(
            stream,
            config.get('gpg', 'recipient'),
            config.get('gpg', 'keyring'),
            secret_keyring=config.get('gpg', 'secret_keyring')
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        LOG.debug('Not decrypting the stream')

    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            std_out, std_err = proc.communicate()
            if proc.returncode:
                LOG.error('%s exited with code %d', cmd, proc.returncode)
                if std_out:
                    LOG.error('STDOUT: %s', std_out)
                if std_err:
                    LOG.error('STDERR: %s', std_err)
                return
            LOG.info('Successfully restored %s in %s', backup_copy, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', backup_copy, err)
            exit(1)

    export_info(
        config,
        data=time.time() - started_at,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
def __init__(self, input_stream, local_path):
    """
    Modifier that saves a local copy of the stream in local_path file.

    :param input_stream: Input stream. Must be file object
    :param local_path: path to local file
    :raise ModifierException: if the directory for the local copy
        cannot be created.
    """
    super(KeepLocal, self).__init__(input_stream)
    self.local_path = local_path
    local_dir = os.path.dirname(self.local_path)
    # Wrap the OSError in ModifierException so callers deal with one
    # exception type, consistent with the other KeepLocal variants.
    try:
        mkdir_p(local_dir)
    except OSError as err:
        raise ModifierException(
            'Failed to create directory %s: %s' % (local_dir, err)
        )
def __init__(self, input_stream, local_path):
    """
    Modifier that saves a local copy of the stream in local_path file.

    :param input_stream: Input stream. Must be file object
    :param local_path: path to local file
    """
    super(KeepLocal, self).__init__(input_stream)
    self.local_path = local_path
    target_dir = os.path.dirname(local_path)
    try:
        mkdir_p(target_dir)
    except OSError as err:
        raise ModifierException(
            'Failed to create directory %s: %s' % (target_dir, err)
        )
def __init__(self, input_stream, local_path):
    """
    Modifier that saves a local copy of the stream in local_path file.

    :param input_stream: Input stream. Must be file object
    :param local_path: path to local file
    :raise ModifierException: if the directory that should hold the
        local copy cannot be created.
    """
    super(KeepLocal, self).__init__(input_stream)
    self.local_path = local_path
    local_dir = os.path.dirname(self.local_path)
    # Ensure the parent directory of the local copy exists up front;
    # a failure here is re-raised as a ModifierException.
    try:
        mkdir_p(local_dir)
    except OSError as err:
        raise ModifierException(
            'Failed to create directory %s: %s' % (local_dir, err)
        )
def test_my_cnfs(paths, configs, tmpdir):
    """my_cnfs() finds exactly the config files created under root."""
    root = tmpdir.mkdir('root')
    root_str = str(root)
    # prefix each path in paths
    full_paths = [template.format(root=root_str) for template in paths]
    expected = []
    for template, content in configs.items():
        cnf_path = template.format(root=root_str)
        expected.append(cnf_path)
        mkdir_p(dirname(cnf_path))
        with open(cnf_path, 'w') as fp:
            fp.write(content.format(root=root_str))
    assert sorted(my_cnfs(common_paths=full_paths)) == sorted(expected)
def test_my_cnfs(paths, configs, tmpdir):
    """my_cnfs() finds exactly the config files created under root.

    :param paths: templates of common config paths ({root} placeholder).
    :param configs: map of config file path template to file content.
    :param tmpdir: pytest tmpdir fixture.
    """
    root = tmpdir.mkdir('root')
    # prefix each path in paths
    full_paths = [x.format(root=str(root)) for x in paths]
    keys = []
    # .items() instead of the Python-2-only .iteritems(), which was
    # removed in Python 3.
    for key, content in configs.items():
        full_path = key.format(root=str(root))
        keys.append(full_path)
        mkdir_p(dirname(full_path))
        with open(full_path, 'w') as fp:
            fp.write(content.format(root=str(root)))
    list_a = sorted(my_cnfs(common_paths=full_paths))
    list_b = sorted(keys)
    assert list_a == list_b
def restore_from_mysql(config, copy, dst_dir,
                       tmp_dir=None, cache=None, hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str
    :raise DestinationError: if a hostname cannot be determined.
    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    restore_start = time.time()
    # Fall back to the bundled binaries when not configured.
    try:
        xtrabackup_binary = config.get('mysql', 'xtrabackup_binary')
    except ConfigParser.NoOptionError:
        xtrabackup_binary = XTRABACKUP_BINARY
    try:
        xbstream_binary = config.get('mysql', 'xbstream_binary')
    except ConfigParser.NoOptionError:
        xbstream_binary = XBSTREAM_BINARY
    # Prefer a locally kept copy when one exists.
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if osp.exists(osp.join(keep_local_path, copy.key)):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass
    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError('Failed to get hostname from %s'
                                       % copy)
        dst = get_destination(config, hostname=hostname)

    key = copy.key
    status = dst.status()
    stream = dst.get_stream(copy)

    if status[key].type == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, config,
                                        redo_only=False,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, config,
                                    redo_only=False,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)
    else:
        # Incremental copy: restore the full parent first (redo only),
        # then apply the incremental on top of it.
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir, config,
                                        redo_only=True,
                                        xtrabackup_binary=xtrabackup_binary,
                                        xbstream_binary=xbstream_binary)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir, config,
                                    redo_only=True,
                                    xtrabackup_binary=xtrabackup_binary,
                                    xbstream_binary=xbstream_binary)
        restore_from_mysql_incremental(stream, dst_dir, config, tmp_dir,
                                       xtrabackup_binary=xtrabackup_binary,
                                       xbstream_binary=xbstream_binary)

    # Save the original my.cnf files under <dst_dir>/_config.
    config_dir = os.path.join(dst_dir, "_config")
    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        # 0o755 (not the Python-2-only literal 0755, a SyntaxError on
        # Python 3; 0o755 is valid on both).
        mkdir_p(config_sub_dir, mode=0o755)
        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(config, data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info(
        'Now copy content of %s to MySQL datadir: '
        'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info(
        'Make sure innodb_log_file_size and innodb_log_files_in_group '
        'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)
    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)
    LOG.info('Then you can start MySQL normally.')
def restore_from_file(twindb_config, copy, dst_dir):
    """
    Restore a directory from a backup copy in the directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Instance of BaseCopy or and inheriting classes.
    :type copy: BaseCopy
    :param dst_dir: Path to destination directory. Must exist and be empty.
    :type dst_dir: str
    """
    LOG.info('Restoring %s in %s', copy.key, dst_dir)
    mkdir_p(dst_dir)
    restore_start = time.time()
    # Prefer a locally kept copy when one exists under keep_local_path;
    # otherwise stream from the configured remote destination.
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and os.path.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(osp.join(keep_local_path, copy.key))
        stream = dst.get_stream(copy)
    else:
        dst = twindb_config.destination()
        stream = dst.get_stream(copy)
    # GPG modifier
    if twindb_config.gpg:
        gpg = Gpg(
            stream,
            twindb_config.gpg.recipient,
            twindb_config.gpg.keyring,
            secret_keyring=twindb_config.gpg.secret_keyring
        )
        LOG.debug('Decrypting stream')
        stream = gpg.revert_stream()
    else:
        LOG.debug('Not decrypting the stream')
    # Pipe the (possibly decrypted) stream into tar to unpack into dst_dir.
    with stream as handler:
        try:
            LOG.debug('handler type: %s', type(handler))
            LOG.debug('stream type: %s', type(stream))
            cmd = ["tar", "zvxf", "-"]
            LOG.debug('Running %s', ' '.join(cmd))
            proc = Popen(cmd, stdin=handler, cwd=dst_dir)
            cout, cerr = proc.communicate()
            ret = proc.returncode
            if ret:
                # tar failed; log what we captured and bail out without
                # exporting restore metrics.
                LOG.error('%s exited with code %d', cmd, ret)
                if cout:
                    LOG.error('STDOUT: %s', cout)
                if cerr:
                    LOG.error('STDERR: %s', cerr)
                return
            LOG.info('Successfully restored %s in %s', copy.key, dst_dir)
        except (OSError, DestinationError) as err:
            LOG.error('Failed to decompress %s: %s', copy.key, err)
            exit(1)
    # Export how long the restore took.
    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.restore
    )
def restore_from_mysql(twindb_config, copy, dst_dir,
                       tmp_dir=None, cache=None, hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str
    :raise DestinationError: if a hostname cannot be determined.
    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    restore_start = time.time()
    # Prefer a locally kept copy when one exists under keep_local_path.
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)
    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError(
                    'Failed to get hostname from %s' % copy
                )
        dst = twindb_config.destination(backup_source=hostname)

    key = copy.key
    status = MySQLStatus(dst=dst)
    stream = dst.get_stream(copy)

    if status[key].type == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    stream, dst_dir, twindb_config, redo_only=False
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                stream, dst_dir, twindb_config, redo_only=False)
    else:
        # Incremental copy: restore the full parent first (redo only),
        # then apply the incremental on top of it.
        full_copy = status.candidate_parent(
            copy.run_type
        )
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    full_stream, dst_dir, twindb_config, redo_only=True
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                full_stream, dst_dir, twindb_config, redo_only=True
            )
        restore_from_mysql_incremental(
            stream, dst_dir, twindb_config, tmp_dir
        )

    # Save the original my.cnf files under <dst_dir>/_config.
    config_dir = os.path.join(dst_dir, "_config")
    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(
            config_dir,
            os.path.dirname(path).lstrip('/')
        )
        # 0o755 (not the Python-2-only literal 0755, a SyntaxError on
        # Python 3; 0o755 is valid on both).
        mkdir_p(config_sub_dir, mode=0o755)
        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(twindb_config,
                data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)
    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)
    LOG.info('Then you can start MySQL normally.')
def get_container(
    name,
    client,
    network,
    datadir=None,
    bootstrap_script=None,
    last_n=1,
    twindb_config_dir=None,
    image=NODE_IMAGE,
):
    """Create and start a docker container attached to the test network.

    Returns the container description dict with an extra ``ip`` key, or
    None when the container failed to start (it is removed in that case).
    """
    api = client.api
    api.pull(image)

    cwd = os.getcwd()
    LOG.debug("Current directory: %s", cwd)

    # Bind-mount the source tree, and optionally the config dir and datadir.
    bind_mounts = {
        cwd: {
            "bind": "/twindb-backup",
            "mode": "rw",
        }
    }
    if twindb_config_dir:
        LOG.debug("TwinDB config directory: %s", twindb_config_dir)
        mkdir_p(twindb_config_dir, mode=0o755)
        bind_mounts[twindb_config_dir] = {
            "bind": "/etc/twindb",
            "mode": "rw",
        }
    if datadir:
        bind_mounts[datadir] = {
            "bind": "/var/lib/mysql",
            "mode": "rw",
        }

    host_config = api.create_host_config(
        binds=bind_mounts,
        dns=["8.8.8.8", "208.67.222.222", "208.67.220.220"])
    container_ip = "172.%d.3.%d" % (network["second_octet"], last_n)
    networking_config = api.create_networking_config(
        {network["NAME"]: api.create_endpoint_config(
            ipv4_address=container_ip)})
    LOG.debug(networking_config)

    container_hostname = "%s_%d" % (name, last_n)
    kwargs = {
        "image": image,
        "name": container_hostname,
        "ports": [22, 3306],
        "hostname": container_hostname,
        "host_config": host_config,
        "networking_config": networking_config,
        "volumes": ["/twindb-backup"],
        "environment": {},
    }
    # Propagate the DEV flag into the container when set in our environment.
    if "DEV" in os.environ:
        kwargs["environment"] = {"DEV": os.environ["DEV"]}
    if bootstrap_script:
        kwargs["command"] = "bash %s" % bootstrap_script

    container = api.create_container(**kwargs)
    container["ip"] = container_ip
    LOG.info("Created container %r", container)
    try:
        api.start(container["Id"])
        LOG.info("Started %r", container)
        return container
    except APIError as err:
        LOG.error(err)
        client.api.remove_container(container=container["Id"], force=True)
def get_container(name, client, network, datadir=None,
                  bootstrap_script=None, last_n=1,
                  twindb_config_dir=None, image=NODE_IMAGE):
    """Create and start a docker container attached to the test network.

    :param name: base name of the container; ``_<last_n>`` is appended.
    :param client: docker client.
    :param network: dict with at least 'NAME' and 'second_octet' keys.
    :param datadir: optional host dir to mount as /var/lib/mysql.
    :param bootstrap_script: optional script to run as the container command.
    :param last_n: suffix for the container name/IP.
    :param twindb_config_dir: optional host dir to mount as /etc/twindb.
    :param image: docker image to run.
    :return: container description dict with an extra 'ip' key, or None
        if the container failed to start (it is removed in that case).
    """
    api = client.api
    api.pull(image)
    cwd = os.getcwd()
    LOG.debug('Current directory: %s', cwd)
    binds = {
        cwd: {
            'bind': '/twindb-backup',
            'mode': 'rw',
        }
    }
    if twindb_config_dir:
        LOG.debug('TwinDB config directory: %s', twindb_config_dir)
        # 0o755 (not the Python-2-only literal 0755, a SyntaxError on
        # Python 3; 0o755 is valid on both).
        mkdir_p(twindb_config_dir, mode=0o755)
        binds[twindb_config_dir] = {
            'bind': '/etc/twindb',
            'mode': 'rw',
        }
    if datadir:
        binds[datadir] = {
            'bind': '/var/lib/mysql',
            'mode': 'rw',
        }
    host_config = api.create_host_config(
        binds=binds,
        dns=['8.8.8.8', '208.67.222.222', '208.67.220.220'])
    ip = '172.%d.3.%d' % (network['second_octet'], last_n)
    networking_config = api.create_networking_config(
        {network['NAME']: api.create_endpoint_config(ipv4_address=ip)})
    LOG.debug(networking_config)
    container_hostname = '%s_%d' % (name, last_n)
    kwargs = {
        'image': image,
        'name': container_hostname,
        'ports': [22, 3306],
        'hostname': container_hostname,
        'host_config': host_config,
        'networking_config': networking_config,
        'volumes': ['/twindb-backup'],
        'environment': {}
    }
    # Propagate the DEV flag into the container when set in our environment.
    try:
        kwargs['environment'] = {'DEV': os.environ['DEV']}
    except KeyError:
        pass
    if bootstrap_script:
        kwargs['command'] = 'bash %s' % bootstrap_script
    container = api.create_container(**kwargs)
    container['ip'] = ip
    LOG.info('Created container %r', container)
    try:
        api.start(container['Id'])
        LOG.info('Started %r', container)
        return container
    except APIError as err:
        LOG.error(err)
        client.api.remove_container(container=container['Id'], force=True)
def path(self):
    """
    Root path on local file system where local backup copies are stored.

    Side effect: every access creates the directory (including missing
    parents) if it does not exist yet.

    :return: the local backups root path.
    :rtype: str
    """
    mkdir_p(self._path)
    return self._path
def __init__(self, path=None):
    """
    Local destination rooted at ``path``.

    :param path: root directory where local backup copies are stored.
    """
    super(Local, self).__init__(path)
    self._path = path
    # Reading self.path creates the directory as a side effect
    # (the property calls mkdir_p before returning the path).
    self.remote_path = self.path
    # NOTE(review): the property access above already created the
    # directory, so this call looks redundant — presumably mkdir_p is
    # idempotent and this is harmless; confirm before removing.
    mkdir_p(self.path)
def restore_from_mysql(config, backup_copy, dst_dir, cache=None):
    """
    Restore MySQL datadir in a given directory

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param backup_copy: Backup copy name.
    :type backup_copy: str
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :raise DestinationError: if a hostname cannot be determined from
        the backup copy name.
    """
    LOG.info('Restoring %s in %s', backup_copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    # Prefer the locally kept copy when the backup lives under
    # keep_local_path.
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        if os.path.exists(backup_copy) \
                and backup_copy.startswith(keep_local_path):
            dst = Local(keep_local_path)
    except ConfigParser.NoOptionError:
        pass
    if not dst:
        hostname = get_hostname_from_backup_copy(backup_copy)
        if not hostname:
            raise DestinationError('Failed to get hostname from %s'
                                   % backup_copy)
        dst = get_destination(config, hostname=hostname)

    key = dst.basename(backup_copy)
    status = dst.status()
    stream = dst.get_stream(backup_copy)

    if get_backup_type(status, key) == "full":
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, config)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, config)
    else:
        # Incremental copy: restore the full parent first (redo only),
        # then apply the incremental on top of it.
        full_copy = dst.get_full_copy_name(backup_copy)
        full_stream = dst.get_stream(full_copy)
        cache_key = os.path.basename(full_copy)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir, config,
                                        redo_only=True)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir, config,
                                    redo_only=True)
        restore_from_mysql_incremental(stream, dst_dir, config)

    # Save the original my.cnf files under <dst_dir>/_config.
    config_dir = os.path.join(dst_dir, "_config")
    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip('/'))
        # mkdir_p instead of os.makedirs: several my.cnf files may live
        # in the same directory, and os.makedirs raises OSError when the
        # directory already exists. mkdir_p is the idempotent helper this
        # function already uses for dst_dir.
        mkdir_p(config_sub_dir)
        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    LOG.info('Successfully restored %s in %s.', backup_copy, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s/* /var/lib/mysql/', dst_dir, dst_dir)
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)
    if os.path.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)
    LOG.info('Then you can start MySQL normally.')
def restore_from_mysql(twindb_config, copy, dst_dir,
                       tmp_dir=None, cache=None, hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str
    :raise DestinationError: if a hostname cannot be determined.
    """
    LOG.info("Restoring %s in %s", copy, dst_dir)
    mkdir_p(dst_dir)
    dst = None
    restore_start = time.time()
    # Prefer a locally kept copy when one exists under keep_local_path.
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)
    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError("Failed to get hostname from %s" % copy)
        dst = twindb_config.destination(backup_source=hostname)
    key = copy.key
    # NOTE(review): when restoring from the local copy, hostname may still
    # be None here, making status_directory None — confirm that is intended.
    status = MySQLStatus(dst=dst, status_directory=hostname)
    stream = dst.get_stream(copy)
    if status[key].type == "full":
        # Full copy: restore it directly (optionally via the local cache).
        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(stream, dst_dir, twindb_config,
                                        redo_only=False)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(stream, dst_dir, twindb_config,
                                    redo_only=False)
    else:
        # Incremental copy: restore the full parent first (redo only),
        # then apply the incremental on top of it.
        full_copy = status.candidate_parent(copy.run_type)
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(full_stream, dst_dir, twindb_config,
                                        redo_only=True)
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(full_stream, dst_dir, twindb_config,
                                    redo_only=True)
        restore_from_mysql_incremental(stream, dst_dir, twindb_config,
                                       tmp_dir)
    # Save the original my.cnf files under <dst_dir>/_config.
    config_dir = os.path.join(dst_dir, "_config")
    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(config_dir,
                                      os.path.dirname(path).lstrip("/"))
        mkdir_p(config_sub_dir, mode=0o755)
        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), "w") as mysql_config:
            mysql_config.write(content)
    update_grastate(dst_dir, status, key)
    # Export how long the restore took.
    export_info(
        twindb_config,
        data=time.time() - restore_start,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.restore,
    )
    LOG.info("Successfully restored %s in %s.", copy.key, dst_dir)
    LOG.info(
        "Now copy content of %s to MySQL datadir: "
        "cp -R %s /var/lib/mysql/",
        dst_dir,
        osp.join(dst_dir, "*"),
    )
    LOG.info("Fix permissions: chown -R mysql:mysql /var/lib/mysql/")
    LOG.info(
        "Make sure innodb_log_file_size and innodb_log_files_in_group "
        "in %s/backup-my.cnf and in /etc/my.cnf are same.",
        dst_dir,
    )
    if osp.exists(config_dir):
        LOG.info("Original my.cnf is restored in %s.", config_dir)
    LOG.info("Then you can start MySQL normally.")