def __init__(self, path, mode, argv, level, inline, split=False):
    self.argv = argv
    self.level = level
    self.inline = inline
    if not inline:
        if split:
            LOG.warning("The split option only works if inline is enabled")
        self.fileobj = io.open(os.path.splitext(path)[0], mode)
        self.filehandle = self.fileobj.fileno()
    else:
        if level:
            if "gpg" in argv[0]:
                argv += ["-z%d" % level]
            else:
                argv += ["-%d" % level]
        self.stderr = TemporaryFile()
        LOG.debug("* Executing: %s", subprocess.list2cmdline(argv))
        if split:
            split_args = [which("split"), "-a5", "--bytes=1G", "-", path + "."]
            LOG.debug("* Splitting dump file with: %s", subprocess.list2cmdline(split_args))
            self.pid = subprocess.Popen(
                argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=self.stderr
            )
            self.split = subprocess.Popen(split_args, stdin=self.pid.stdout, stderr=self.stderr)
        else:
            self.fileobj = io.open(path, "w")
            self.stderr = TemporaryFile()
            self.pid = subprocess.Popen(
                argv, stdin=subprocess.PIPE, stdout=self.fileobj.fileno(), stderr=self.stderr
            )
        self.filehandle = self.pid.stdin.fileno()
    self.name = path
    self.closed = False
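# A minimal standalone sketch of the inline "compress, then pipe into split"
# pattern used above. It assumes gzip and GNU split are on PATH; the output
# prefix and chunk size are hypothetical, not taken from plugin configuration.
import subprocess
from tempfile import TemporaryFile

def compress_and_split_sketch(dest_prefix="/tmp/backup.sql.gz."):
    stderr = TemporaryFile()
    # gzip reads the dump from its stdin and writes compressed bytes to stdout
    compressor = subprocess.Popen(
        ["gzip", "-1"],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=stderr,
    )
    # split consumes the compressed stream and writes 1G chunks named
    # <dest_prefix>aaaaa, <dest_prefix>aaaab, ...
    splitter = subprocess.Popen(
        ["split", "-a5", "--bytes=1G", "-", dest_prefix],
        stdin=compressor.stdout, stderr=stderr,
    )
    return compressor, splitter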
def apply_xtrabackup_logfile(xb_cfg, backupdir):
    """Apply xtrabackup_logfile via innobackupex --apply-log [options]"""
    # run ${innobackupex} --apply-log ${backupdir}
    # only applies when streaming is not used
    stream_method = determine_stream_method(xb_cfg['stream'])
    if stream_method is not None:
        LOG.warning("Skipping --prepare/--apply-logs since backup is streamed")
        return

    if '--compress' in xb_cfg['additional-options']:
        LOG.warning("Skipping --apply-logs since --compress option appears "
                    "to have been used.")
        return

    innobackupex = xb_cfg['innobackupex']
    if not isabs(innobackupex):
        try:
            innobackupex = which(innobackupex)
        except WhichError:
            raise BackupError("Failed to find innobackupex script")

    args = [
        innobackupex,
        '--apply-log',
        backupdir
    ]

    cmdline = list2cmdline(args)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    # log innobackupex output and fail the backup if --apply-log exits non-zero
    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
def apply_mariabackup_logfile(mb_cfg, backupdir):
    """Apply mariabackup_logfile via mariabackup --prepare [options]"""
    # run ${innobackupex} --prepare ${backupdir}
    # only applies when streaming is not used
    stream_method = determine_stream_method(mb_cfg["stream"])
    if stream_method is not None:
        LOG.warning("Skipping --prepare since backup is streamed")
        return

    if "--compress" in mb_cfg["additional-options"]:
        LOG.warning("Skipping --prepare since --compress option appears "
                    "to have been used.")
        return

    innobackupex = mb_cfg["innobackupex"]
    if not isabs(innobackupex):
        innobackupex = which(innobackupex)

    args = [
        innobackupex,
        "--prepare",
        "--target-dir=" + join(backupdir, "data")
    ]

    cmdline = list2cmdline(args)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
def apply_xtrabackup_logfile(xb_cfg, backupdir):
    """Apply xtrabackup_logfile via innobackupex --apply-log [options]"""
    # run ${innobackupex} --apply-log ${backupdir}
    # only applies when streaming is not used
    stream_method = determine_stream_method(xb_cfg['stream'])
    if stream_method is not None:
        LOG.warning("Skipping --prepare/--apply-logs since backup is streamed")
        return

    if '--compress' in xb_cfg['additional-options']:
        LOG.warning("Skipping --apply-logs since --compress option appears "
                    "to have been used.")
        return

    innobackupex = xb_cfg['innobackupex']
    if not isabs(innobackupex):
        innobackupex = which(innobackupex)

    args = [innobackupex, '--apply-log', backupdir]

    cmdline = list2cmdline(args)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
def apply_mariabackup_logfile(mb_cfg, backupdir):
    """Apply mariabackup_logfile via mariabackup --prepare [options]"""
    # run ${innobackupex} --prepare ${backupdir}
    # only applies when streaming is not used
    stream_method = determine_stream_method(mb_cfg["stream"])
    if stream_method is not None:
        LOG.warning("Skipping --prepare since backup is streamed")
        return

    if "--compress" in mb_cfg["additional-options"]:
        LOG.warning("Skipping --prepare since --compress option appears "
                    "to have been used.")
        return

    innobackupex = mb_cfg["innobackupex"]
    if not isabs(innobackupex):
        innobackupex = which(innobackupex)

    args = [innobackupex, "--prepare", "--target-dir=" + join(backupdir, "data")]

    cmdline = list2cmdline(args)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
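# Hedged usage sketch: the shape of the mb_cfg mapping read by
# apply_mariabackup_logfile() above. The key names mirror the lookups in the
# function ('stream', 'additional-options', 'innobackupex'); the values and
# the backup directory are hypothetical examples, not plugin defaults.
example_mb_cfg = {
    "stream": "no",                 # assumed to make determine_stream_method() return None
    "additional-options": [],       # no --compress, so --prepare is not skipped
    "innobackupex": "mariabackup",  # resolved via which() when not an absolute path
}
# apply_mariabackup_logfile(example_mb_cfg, "/var/spool/holland/example")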
def xtrabackup_version():
    """Get xtrabackup version"""
    xtrabackup_binary = "xtrabackup"
    if not isabs(xtrabackup_binary):
        xtrabackup_binary = which(xtrabackup_binary)
    xb_version = [xtrabackup_binary, "--version"]
    cmdline = list2cmdline(xb_version)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(xb_version, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    # avoid shadowing the function name; stays None if no version line is found
    version = None
    for line in process.stdout:
        if isinstance(line, bytes):
            line = line.rstrip().decode("UTF-8")
        if "version" in line:
            version = re.search(r"version\s*([\d.]+)", line).group(1)
        LOG.info("%s", line)
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
    return version
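# Small self-contained check of the version-parsing regex used above, run
# against an illustrative output line (not captured from a real binary).
import re

sample_line = "xtrabackup version 8.0.35-30 based on MySQL server 8.0.35"
match = re.search(r"version\s*([\d.]+)", sample_line)
assert match is not None and match.group(1) == "8.0.35"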
def locate_mysqld_exe(config):
    """find mysqld executable"""
    mysqld_candidates = config.pop("mysqld-exe")
    for candidate in mysqld_candidates:
        if os.path.isfile(candidate):
            return candidate
        try:
            return which(candidate)
        except BackupError:
            pass
    raise BackupError("Failed to find mysqld binary")
def locate_mysqld_exe(config):
    """find mysqld executable"""
    mysqld_candidates = config.pop('mysqld-exe')
    for candidate in mysqld_candidates:
        if os.path.isfile(candidate):
            return candidate
        try:
            return which(candidate)
        except BackupError:
            pass
    raise BackupError("Failed to find mysqld binary")
def locate_mysqld_exe(config):
    mysqld_candidates = config.pop('mysqld-exe')
    for candidate in mysqld_candidates:
        if os.path.isabs(candidate):
            path = [os.path.dirname(candidate)]
            candidate = os.path.basename(candidate)
        else:
            path = None  # use environ[PATH]
        LOG.debug("Searching for %s on path %s",
                  candidate, path or os.environ['PATH'])
        try:
            return which(candidate, path)
        except WhichError:
            # try the next candidate rather than returning unconditionally
            continue
    raise BackupError("Failed to find mysqld binary")
def lookup_compression(method):
    """
    Looks up the passed compression method in supported COMPRESSION_METHODS
    and returns a tuple in the form of ('command_name', 'file_extension').

    Arguments:

    method -- A string identifier of the compression method (i.e. 'gzip').
    """
    try:
        cmd, ext = COMPRESSION_METHODS[method]
        argv = shlex.split(cmd)
        return [which(argv[0])] + argv[1:], ext
    except KeyError:
        raise OSError("Unsupported compression method '%s'" % method)
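# Hedged sketch of the lookup lookup_compression() performs, using a stand-in
# table; the real COMPRESSION_METHODS mapping is defined elsewhere in the
# module and may carry different commands and extensions.
import shlex

EXAMPLE_METHODS = {"gzip": ("gzip --rsyncable", "gz")}  # hypothetical entry

cmd, ext = EXAMPLE_METHODS["gzip"]
argv = shlex.split(cmd)  # -> ['gzip', '--rsyncable']
# lookup_compression('gzip') would then resolve argv[0] with which() and
# return ([which('gzip'), '--rsyncable'], 'gz').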
def locate_mysqld_exe(config):
    mysqld_candidates = config.pop('mysqld-exe')
    for candidate in mysqld_candidates:
        if os.path.isabs(candidate):
            path = [os.path.dirname(candidate)]
            candidate = os.path.basename(candidate)
        else:
            path = None  # use environ[PATH]
        try:
            LOG.debug("Searching for %s on path %s",
                      candidate, path or os.environ['PATH'])
            return which(candidate, path)
        except WhichError:
            LOG.debug("mysqld path %s does not exist - skipping", candidate)
    raise BackupError("Failed to find mysqld binary")
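# Hedged usage sketch for locate_mysqld_exe(): the candidate list is
# hypothetical, and `config` is assumed to behave like a dict supporting
# pop(). Absolute candidates are searched only in their own directory via
# which(name, [dirname]); bare names fall back to $PATH.
example_config = {
    "mysqld-exe": ["/usr/sbin/mysqld", "mysqld", "/usr/libexec/mysqld"]
}
# mysqld_path = locate_mysqld_exe(example_config)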
def mariabackup_version():
    """Check Mariabackup version"""
    mariabackup_binary = "mariabackup"
    if not isabs(mariabackup_binary):
        mariabackup_binary = which(mariabackup_binary)
    mb_version = [mariabackup_binary, "--version"]
    cmdline = list2cmdline(mb_version)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(mb_version, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
def build_mb_args(config, basedir, defaults_file=None):
    """Build the commandline for mariabackup"""
    innobackupex = config['innobackupex']
    if not isabs(innobackupex):
        try:
            innobackupex = which(innobackupex)
        except WhichError:
            raise BackupError("Failed to find innobackupex script")
    ibbackup = config['ibbackup']
    stream = determine_stream_method(config['stream'])
    tmpdir = evaluate_tmpdir(config['tmpdir'], basedir)
    slave_info = config['slave-info']
    safe_slave_backup = config['safe-slave-backup']
    no_lock = config['no-lock']
    # filter additional options to remove any empty values
    extra_opts = [_f for _f in config['additional-options'] if _f]

    args = [
        innobackupex,
    ]
    if defaults_file:
        args.append('--defaults-file=' + defaults_file)
    args.append('--backup')
    if ibbackup:
        args.append('--ibbackup=' + ibbackup)
    if stream:
        args.append('--stream=' + stream)
    else:
        basedir = join(basedir, 'data')
    if tmpdir:
        args.append('--tmpdir=' + tmpdir)
    if slave_info:
        args.append('--slave-info')
    if safe_slave_backup:
        args.append('--safe-slave-backup')
    if no_lock:
        args.append('--no-lock')
    args.append('--no-timestamp')
    if extra_opts:
        args.extend(extra_opts)
    if basedir:
        args.append('--target-dir=' + basedir)
    return args
def build_xb_args(config, basedir, defaults_file=None):
    """Build the commandline for xtrabackup"""
    innobackupex = config['innobackupex']
    if not isabs(innobackupex):
        try:
            innobackupex = which(innobackupex)
        except WhichError:
            raise BackupError("Failed to find innobackupex script")
    ibbackup = config['ibbackup']
    stream = determine_stream_method(config['stream'])
    tmpdir = evaluate_tmpdir(config['tmpdir'], basedir)
    slave_info = config['slave-info']
    safe_slave_backup = config['safe-slave-backup']
    no_lock = config['no-lock']
    # filter additional options to remove any empty values
    extra_opts = [_f for _f in config['additional-options'] if _f]

    args = [
        innobackupex,
    ]
    if defaults_file:
        args.append('--defaults-file=' + defaults_file)
    if ibbackup:
        args.append('--ibbackup=' + ibbackup)
    if stream:
        args.append('--stream=' + stream)
    else:
        basedir = join(basedir, 'data')
    if tmpdir:
        args.append('--tmpdir=' + tmpdir)
    if slave_info:
        args.append('--slave-info')
    if safe_slave_backup:
        args.append('--safe-slave-backup')
    if no_lock:
        args.append('--no-lock')
    args.append('--no-timestamp')
    if extra_opts:
        args.extend(extra_opts)
    if basedir:
        args.append(basedir)
    return args
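# Hedged sketch of the config mapping build_xb_args() consumes; every value
# below is a hypothetical example, and the behaviour of the helpers
# (determine_stream_method, evaluate_tmpdir) is assumed, not verified here.
example_xb_config = {
    'innobackupex': 'innobackupex',      # resolved via which() when not absolute
    'ibbackup': '',                      # empty -> no --ibbackup flag
    'stream': 'no',                      # assumed to map to None (no --stream flag)
    'tmpdir': '',                        # passed through evaluate_tmpdir()
    'slave-info': True,                  # adds --slave-info
    'safe-slave-backup': False,
    'no-lock': False,
    'additional-options': ['--parallel=4'],
}
# With no stream method, 'data' is joined onto the backup directory and the
# result is appended as the final positional argument:
# build_xb_args(example_xb_config, '/var/spool/holland/example')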
def backup(self):
    """
    Start a backup.  This attempts one or more mysqldump
    runs.  On error, a BackupError exception will be thrown.
    """
    args = self._build_args()

    if maatkit_version('mk-parallel-dump') >= 3712:
        args = make_compat_args(args)

    LOGGER.info("mk-parallel-dump %s", ' '.join(args))
    mk_pdump = which('mk-parallel-dump')
    pid = subprocess.Popen([mk_pdump] + args,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
    for line in pid.stdout:
        LOGGER.info(line.rstrip())
    for line in pid.stderr:
        LOGGER.error(line.rstrip())
    status = pid.wait()
    if status != 0:
        LOGGER.error("mk-parallel-dump failed (status=%d)", status)
        raise BackupError("mk-parallel-dump exited with %d" % status)
def xtrabackup_version():
    """Get xtrabackup version"""
    xtrabackup_binary = 'xtrabackup'
    if not isabs(xtrabackup_binary):
        xtrabackup_binary = which(xtrabackup_binary)
    xb_version = [xtrabackup_binary, '--version']
    cmdline = list2cmdline(xb_version)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(xb_version, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" %
                          (cmdline, exc.errno, exc.strerror))

    for line in process.stdout:
        LOG.info("%s", line.rstrip().decode('UTF-8'))
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
def build_mb_args(config, basedir, defaults_file=None):
    """Build the commandline for mariabackup"""
    innobackupex = config["innobackupex"]
    if not isabs(innobackupex):
        innobackupex = which(innobackupex)
    ibbackup = config["ibbackup"]
    stream = determine_stream_method(config["stream"])
    tmpdir = evaluate_tmpdir(config["tmpdir"], basedir)
    slave_info = config["slave-info"]
    safe_slave_backup = config["safe-slave-backup"]
    no_lock = config["no-lock"]
    # filter additional options to remove any empty values
    extra_opts = [_f for _f in config["additional-options"] if _f]

    args = [innobackupex]
    if defaults_file:
        args.append("--defaults-file=" + defaults_file)
    args.append("--backup")
    if ibbackup:
        args.append("--ibbackup=" + ibbackup)
    if stream:
        args.append("--stream=" + stream)
    else:
        basedir = join(basedir, "data")
    if tmpdir:
        args.append("--tmpdir=" + tmpdir)
    if slave_info:
        args.append("--slave-info")
    if safe_slave_backup:
        args.append("--safe-slave-backup")
    if no_lock:
        args.append("--no-lock")
    args.append("--no-timestamp")
    if extra_opts:
        args.extend(extra_opts)
    if basedir:
        args.append("--target-dir=" + basedir)
    return args
def __init__(self, path, mode, argv, level, inline, split=False):
    self.argv = argv
    self.level = level
    self.inline = inline
    if not inline:
        if split:
            LOG.warning("The split option only works if inline is enabled")
        self.fileobj = io.open(os.path.splitext(path)[0], mode)
        self.filehandle = self.fileobj.fileno()
    else:
        if level:
            if "gpg" in argv[0]:
                argv += ["-z%d" % level]
            else:
                argv += ["-%d" % level]
        self.stderr = TemporaryFile()
        LOG.debug("* Executing: %s", subprocess.list2cmdline(argv))
        if split and not DISABLE_SPLIT:
            split_args = [which("split"), "-a5", "--bytes=1G", "-", path + "."]
            LOG.debug("* Splitting dump file with: %s", subprocess.list2cmdline(split_args))
            self.pid = subprocess.Popen(
                argv, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=self.stderr
            )
            self.split = subprocess.Popen(split_args, stdin=self.pid.stdout, stderr=self.stderr)
        else:
            if DISABLE_SPLIT:
                LOG.info("Split option is not supported with this version of subprocess module")
            self.fileobj = io.open(path, "w")
            self.stderr = TemporaryFile()
            self.pid = subprocess.Popen(
                argv, stdin=subprocess.PIPE, stdout=self.fileobj.fileno(), stderr=self.stderr
            )
        self.filehandle = self.pid.stdin.fileno()
    self.name = path
    self.closed = False
class MockConfig(ConfigObj):
    def validate_config(self, *args, **kw):
        pass

config = MockConfig()
config['sqlite'] = {
    'databases': [os.path.join(os.path.dirname(__file__), 'sqlite.db')]
}
config['compression'] = {
    'method': 'gzip',
    'inline': 'yes',
    'level': 1
}

try:
    config['sqlite']['binary'] = which('sqlite')
except WhichError:
    try:
        config['sqlite']['binary'] = which('sqlite3')
    except WhichError:
        raise Exception("Unable to find sqlite binary")

def setup_func():
    "set up test fixtures"
    config['tmpdir'] = mkdtemp()

def teardown_func():
    "tear down test fixtures"
    if os.path.exists(config['tmpdir']):
        shutil.rmtree(config['tmpdir'])
from holland.lib.which import which

class MockConfig(ConfigObj):
    def validate_config(self, *args, **kw):
        pass

config = MockConfig()
config['sqlite'] = {
    'databases': [os.path.join(os.path.dirname(__file__), 'sqlite.db')]
}
config['compression'] = {'method': 'gzip', 'inline': 'yes', 'level': 1}

try:
    config['sqlite']['binary'] = which('sqlite')
except Exception:
    config['sqlite']['binary'] = which('sqlite3')

def setup_func():
    "set up test fixtures"
    config['tmpdir'] = mkdtemp()

def teardown_func():
    "tear down test fixtures"
    if os.path.exists(config['tmpdir']):
        shutil.rmtree(config['tmpdir'])