Example #1
 def open_xb_stdout(self):
     """Open the stdout output for a streaming xtrabackup run"""
     config = self.config["xtrabackup"]
     backup_directory = self.target_directory
     stream = util.determine_stream_method(config["stream"])
     if stream:
         if stream == "tar":
             archive_path = join(backup_directory, "backup.tar")
         elif stream == "xbstream":
             archive_path = join(backup_directory, "backup.xb")
         else:
             raise BackupError("Unknown stream method '%s'" % stream)
         zconfig = self.config["compression"]
         try:
             return open_stream(
                 archive_path,
                 "w",
                 method=zconfig["method"],
                 level=zconfig["level"],
                 extra_args=zconfig["options"],
                 inline=zconfig["inline"],
             )
         except OSError as exc:
             raise BackupError("Unable to create output file: %s" % exc)
     else:
         return open("/dev/null", "w")
Example #2
    def backup(self):
        """
        Create backup
        """
        if self.dry_run:
            return
        if not os.path.exists(self.config['tar']['directory']) \
         or not os.path.isdir(self.config['tar']['directory']):
            raise BackupError('{0} is not a directory!'.format(self.config['tar']['directory']))
        out_name = "{0}.tar".format(
            self.config['tar']['directory'].lstrip('/').replace('/', '_'))
        outfile = os.path.join(self.target_directory, out_name)
        args = ['tar', 'c', self.config['tar']['directory']]
        errlog = TemporaryFile()
        stream = self._open_stream(outfile, 'w')
        LOG.info("Executing: %s", list2cmdline(args))
        pid = Popen(
            args,
            stdout=stream.fileno(),
            stderr=errlog.fileno(),
            close_fds=True)
        status = pid.wait()
        try:
            errlog.flush()
            errlog.seek(0)
            for line in errlog:
                LOG.error("%s[%d]: %s", list2cmdline(args), pid.pid, line.rstrip())
        finally:
            errlog.close()

        if status != 0:
            raise BackupError('tar failed (status={0})'.format(status))
Example #3
    def _backup(self):
        """Real backup method.  May raise BackupError exceptions"""
        config = self.config['mysqldump']
        # setup defaults_file with ignore-table exclusions
        defaults_file = os.path.join(self.target_directory, 'my.cnf')
        write_options(self.mysql_config, defaults_file)
        if config['exclude-invalid-views']:
            LOG.info("* Finding and excluding invalid views...")
            definitions_path = os.path.join(self.target_directory,
                                            'invalid_views.sql')
            exclude_invalid_views(self.schema, self.client, definitions_path)
        add_exclusions(self.schema, defaults_file)
        # find the path to the mysqldump command
        mysqldump_bin = find_mysqldump(path=config['mysql-binpath'])
        LOG.info("Using mysqldump executable: %s", mysqldump_bin)
        # setup the mysqldump environment
        extra_defaults = config['extra-defaults']
        try:
            mysqldump = MySQLDump(defaults_file,
                                  mysqldump_bin,
                                  extra_defaults=extra_defaults)
        except MySQLDumpError as exc:
            raise BackupError(str(exc))
        except Exception as ex:
            LOG.warning(ex)
            raise BackupError(str(ex))
        LOG.info("mysqldump version %s",
                 '.'.join([str(digit) for digit in mysqldump.version]))
        options = collect_mysqldump_options(config, mysqldump, self.client)
        validate_mysqldump_options(mysqldump, options)

        os.mkdir(os.path.join(self.target_directory, 'backup_data'))

        if self.config['compression']['method'] != 'none' and \
            self.config['compression']['level'] > 0:
            try:
                cmd, ext = lookup_compression(
                    self.config['compression']['method'])
            except OSError as exc:
                raise BackupError(
                    "Unable to load compression method '%s': %s" %
                    (self.config['compression']['method'], exc))
            LOG.info("Using %s compression level %d with args %s",
                     self.config['compression']['method'],
                     self.config['compression']['level'],
                     self.config['compression']['options'])
        else:
            LOG.info("Not compressing mysqldump output")
            cmd = ''
            ext = ''

        try:
            start(mysqldump=mysqldump,
                  schema=self.schema,
                  lock_method=config['lock-method'],
                  file_per_database=config['file-per-database'],
                  open_stream=self._open_stream,
                  compression_ext=ext,
                  arg_per_database=config['arg-per-database'])
        except MySQLDumpError as exc:
            raise BackupError(str(exc))
Example #4
    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate"""

        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config['mysqldump']['estimate-method']

        if estimate_method.startswith('const:'):
            try:
                return parse_size(estimate_method[6:])
            except ValueError as exc:
                raise BackupError(str(exc))

        if estimate_method != 'plugin':
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)

        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
            except Exception as ex:
                LOG.error("Failed to connect to database")
                LOG.error("%s", ex)
                raise BackupError("MySQL Error %s" % ex)
            try:
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            except MySQLError as exc:
                LOG.error("Failed to estimate backup size")
                LOG.error("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            return float(sum([db.size for db in self.schema.databases]))
        finally:
            self.client.disconnect()
Example #5
    def backup(self):
        """
        Start a backup.
        """

        # estimate and setup has completed at this point
        # so ensure the connection is closed - we will never reuse this
        self.connection.close()

        if self.dry_run:
            # Very simple dry-run information,
            # enough to know that:
            # 1) We can connect to Postgres using pgpass data
            # 2) The exact databases we would dump
            pg_dry_run(self.databases, self.config)
            return

        # First run a pg_dumpall -g and save the globals
        # Then run a pg_dump for each database we find
        backup_dir = os.path.join(self.target_directory, 'data')

        # put everything in data/
        try:
            os.mkdir(backup_dir)
        except OSError as exc:
            raise BackupError("Failed to create backup directory %s: %s" %
                              (backup_dir, exc))

        try:
            backup_pgsql(backup_dir, self.config, self.databases)
        except (OSError, PgError) as exc:
            LOG.debug("Failed to backup Postgres. %s",
                      str(exc), exc_info=True)
            raise BackupError(str(exc))
Example #6
def call_hooks(event, entry):
    """
    Run pre or post hooks
    """
    hook = event + "-command"

    if entry.config['holland:backup'][hook] is not None:
        cmd = entry.config['holland:backup'][hook]
        try:
            cmd = Template(cmd).safe_substitute(hook=hook,
                                                backupset=entry.backupset,
                                                backupdir=entry.path)
            LOG.info(" [%s]> %s", hook, cmd)
            process = Popen(cmd,
                            shell=True,
                            stdin=open("/dev/null", "r"),
                            stdout=PIPE,
                            stderr=PIPE,
                            close_fds=True)
            output, errors = process.communicate()
        except OSError as exc:
            raise BackupError("%s", exc)

        for line in errors.splitlines():
            LOG.error(" ! %s", line)
        for line in output.splitlines():
            LOG.info(" + %s", line)
        if process.returncode != 0:
            raise BackupError("%s command failed" % hook)
    return 0
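
A minimal sketch of the template expansion performed by call_hooks above; the command, backupset name, and directory are illustrative, and "before-backup" is an assumed event name:

from string import Template

cmd = "/usr/local/bin/notify.sh ${backupset} ${backupdir}"   # hypothetical hook command
expanded = Template(cmd).safe_substitute(
    hook="before-backup-command",
    backupset="default",
    backupdir="/var/spool/holland/default/20240101_000000",
)
# expanded == "/usr/local/bin/notify.sh default /var/spool/holland/default/20240101_000000"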
Example #7
    def backup(self):
        """
        Create backup
        """
        if self.dry_run:
            return
        if not os.path.exists(
                self.config["tar"]["directory"]) or not os.path.isdir(
                    self.config["tar"]["directory"]):
            raise BackupError("{0} is not a directory!".format(
                self.config["tar"]["directory"]))
        out_name = "{0}.tar".format(
            self.config["tar"]["directory"].lstrip("/").replace("/", "_"))
        outfile = os.path.join(self.target_directory, out_name)
        args = ["tar", "c", self.config["tar"]["directory"]]
        errlog = TemporaryFile()
        stream = open_stream(outfile, "w", **self.config["compression"])
        LOG.info("Executing: %s", list2cmdline(args))
        pid = Popen(args,
                    stdout=stream.fileno(),
                    stderr=errlog.fileno(),
                    close_fds=True)
        status = pid.wait()
        try:
            errlog.flush()
            errlog.seek(0)
            for line in errlog:
                LOG.error("%s[%d]: %s", list2cmdline(args), pid.pid,
                          line.rstrip())
        finally:
            errlog.close()

        if status != 0:
            raise BackupError("tar failed (status={0})".format(status))
Example #8
def apply_mariabackup_logfile(mb_cfg, backupdir):
    """Apply mariabackup_logfile via mariabackup --prepare [options]"""
    # run ${innobackupex} --prepare ${backupdir}
    # only applies when streaming is not used
    stream_method = determine_stream_method(mb_cfg["stream"])
    if stream_method is not None:
        LOG.warning("Skipping --prepare since backup is streamed")
        return

    if "--compress" in mb_cfg["additional-options"]:
        LOG.warning("Skipping --prepare since --compress option appears "
                    "to have been used.")
        return

    innobackupex = mb_cfg["innobackupex"]
    if not isabs(innobackupex):
        innobackupex = which(innobackupex)
    args = [
        innobackupex, "--prepare", "--target-dir=" + join(backupdir, "data")
    ]

    cmdline = list2cmdline(args)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" % cmdline, exc.errno,
                          exc.strerror)

    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
Example #9
    def dryrun(self, binary_xtrabackup):
        """Perform test backup"""
        from subprocess import Popen, list2cmdline, PIPE, STDOUT

        xb_cfg = self.config["xtrabackup"]
        args = util.build_xb_args(xb_cfg, self.target_directory,
                                  self.defaults_path, binary_xtrabackup)
        LOG.info("* xtrabackup command: %s", list2cmdline(args))
        args = [
            "xtrabackup", "--defaults-file=" + self.defaults_path, "--help"
        ]
        cmdline = list2cmdline(args)
        LOG.info("* Verifying generated config '%s'", self.defaults_path)
        LOG.debug("* Verifying via command: %s", cmdline)
        try:
            process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
        except OSError:
            raise BackupError("Failed to find xtrabackup binary")
        stdout = process.stdout.read()
        process.wait()
        # Note: xtrabackup --help will exit with 1 usually
        if process.returncode != 1:
            LOG.error("! %s failed. Output follows below.", cmdline)
            for line in stdout.splitlines():
                LOG.error("! %s", line)
            raise BackupError("%s exited with failure status [%d]" %
                              (cmdline, process.returncode))
Example #10
def _stop_slave(client, config):
    """Stop MySQL replication"""
    try:
        client.stop_slave(sql_thread_only=True)
        LOG.info("Stopped slave")
        config['mysql:replication'] = {}
        repl_cfg = config['mysql:replication']
    except MySQLError as exc:
        raise BackupError("Failed to stop slave[%d]: %s" % exc.args)

    try:
        slave_info = client.show_slave_status()
        if slave_info:
            # update config with replication info
            log_file = slave_info['relay_master_log_file']
            log_pos = slave_info['exec_master_log_pos']
            repl_cfg['slave_master_log_file'] = log_file
            repl_cfg['slave_master_log_pos'] = log_pos
    except MySQLError as exc:
        raise BackupError("Failed to acquire slave status[%d]: %s" % \
                            exc.args)
    try:
        master_info = client.show_master_status()
        if master_info:
            repl_cfg['master_log_file'] = master_info['file']
            repl_cfg['master_log_pos'] = master_info['position']
    except MySQLError as exc:
        raise BackupError("Failed to acquire master status [%d] %s" % exc.args)

    LOG.info("MySQL Replication has been stopped.")
Example #11
def apply_xtrabackup_logfile(xb_cfg, backupdir):
    """Apply xtrabackup_logfile via innobackupex --apply-log [options]"""
    # run ${innobackupex} --apply-log ${backupdir}
    # only applies when streaming is not used
    stream_method = determine_stream_method(xb_cfg['stream'])
    if stream_method is not None:
        LOG.warning("Skipping --prepare/--apply-logs since backup is streamed")
        return

    if '--compress' in xb_cfg['additional-options']:
        LOG.warning("Skipping --apply-logs since --compress option appears "
                    "to have been used.")
        return

    innobackupex = xb_cfg['innobackupex']
    if not isabs(innobackupex):
        innobackupex = which(innobackupex)
    args = [innobackupex, '--apply-log', backupdir]

    cmdline = list2cmdline(args)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(args, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" % cmdline, exc.errno,
                          exc.strerror)

    for line in process.stdout:
        LOG.info("%s", line.rstrip())
    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
Example #12
    def check(self):
        """Check if we can take a backup"""
        LOG.info("Checking that SQLite backups can run.")
        if not os.path.exists(self.sqlite_bin):
            raise BackupError("SQLite binary [%s] doesn't exist!" %
                              self.sqlite_bin)

        for database in self.config['sqlite']['databases']:
            # sometimes picks up empty string ('')
            if not database:
                continue

            path = os.path.abspath(os.path.expanduser(database))
            if not os.path.exists(path):
                LOG.error("SQLite database [%s] doesn't exist!", path)
                self.invalid_databases.append(database)
                continue

            process = Popen([self.sqlite_bin, path, '.schema'],
                            stdin=open('/dev/null', 'r'),
                            stdout=open('/dev/null', 'w'),
                            stderr=PIPE)
            _, stderroutput = process.communicate()

            if process.returncode != 0:
                LOG.error(stderroutput)
                self.invalid_databases.append(database)
            else:
                self.databases.append(database)

        if not self.databases and not self.invalid_databases:
            raise BackupError("No SQLite databases to backup!")
Example #13
def xtrabackup_version():
    """Get xtrabackup version"""
    xtrabackup_binary = "xtrabackup"
    if not isabs(xtrabackup_binary):
        xtrabackup_binary = which(xtrabackup_binary)
    xb_version = [xtrabackup_binary, "--version"]
    cmdline = list2cmdline(xb_version)
    LOG.info("Executing: %s", cmdline)
    try:
        process = Popen(xb_version, stdout=PIPE, stderr=STDOUT, close_fds=True)
    except OSError as exc:
        raise BackupError("Failed to run %s: [%d] %s" % cmdline, exc.errno,
                          exc.strerror)

    xtrabackup_version = None  # guard against output with no "version" line
    for line in process.stdout:
        if isinstance(line, bytes):
            line = line.rstrip().decode("UTF-8")
        if "version" in line:
            xtrabackup_version = re.search(r"version\s*([\d.]+)",
                                           line).group(1)
        LOG.info("%s", line)

    process.wait()
    if process.returncode != 0:
        raise BackupError("%s returned failure status [%d]" %
                          (cmdline, process.returncode))
    return xtrabackup_version
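
A hedged illustration of the version extraction above; the sample line mimics typical "xtrabackup --version" output and is not taken from a real run:

import re

line = "xtrabackup version 2.4.20 based on MySQL server 5.7.26 Linux (x86_64)"  # illustrative
assert re.search(r"version\s*([\d.]+)", line).group(1) == "2.4.20"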
Example #14
    def backup(self):
        """Run a backup by running through a LVM snapshot against the device
        the MySQL datadir resides on
        """
        # connect to mysql and lookup what we're supposed to snapshot
        try:
            self.client.connect()
            datadir = os.path.realpath(self.client.show_variable("datadir"))
        except MySQLError as exc:
            raise BackupError("[%d] %s" % exc.args)

        LOG.info("Backing up %s via snapshot", datadir)
        # lookup the logical volume mysql's datadir sits on

        try:
            volume = LogicalVolume.lookup_from_fspath(datadir)
        except LookupError as exc:
            raise BackupError("Failed to lookup logical volume for %s: %s" %
                              (datadir, str(exc)))
        except Exception as ex:
            raise BackupError("Failed to lookup logical volume for %s: %s" %
                              (datadir, str(ex)))

        # create a snapshot manager
        snapshot = build_snapshot(self.config["mysql-lvm"],
                                  volume,
                                  suppress_tmpdir=self.dry_run)
        # calculate where the datadirectory on the snapshot will be located
        rpath = relpath(datadir, getmount(datadir))
        snap_datadir = os.path.abspath(os.path.join(snapshot.mountpoint,
                                                    rpath))
        # setup actions to perform at each step of the snapshot process
        setup_actions(
            snapshot=snapshot,
            config=self.config,
            client=self.client,
            snap_datadir=snap_datadir,
            spooldir=self.target_directory,
        )

        if self.dry_run:
            return _dry_run(self.target_directory, volume, snapshot, datadir)

        try:
            snapshot.start(volume)
        except CallbackFailuresError as exc:
            for callback, error in exc.errors:
                LOG.error("%s: %s", callback, error)
            raise BackupError(
                "Error occurred during snapshot process. Aborting.")
        except LVMCommandError as exc:
            # Something failed in the snapshot process
            raise BackupError(str(exc))
        except BaseException as ex:
            LOG.debug(ex)

        return None
Example #15
def check_innodb(pathinfo, ensure_subdir_of_datadir=False):
    """Check that all tablespaces are in the datadir and filesystem"""
    is_unsafe_for_lvm = False
    is_unsafe_for_physical_backups = False
    datadir = realpath(pathinfo.datadir)
    datadir_mp = getmount(datadir)
    for tablespace in pathinfo.walk_innodb_shared_tablespaces():
        space_mp = getmount(tablespace)
        if space_mp != datadir_mp:
            LOG.error(
                "InnoDB shared tablespace %s is not on the same "
                "filesystem as the datadir %s",
                tablespace,
                datadir,
            )
            is_unsafe_for_lvm = True
        if ensure_subdir_of_datadir and not is_subdir(tablespace, datadir):
            LOG.error(
                "InnoDB shared tablespace %s is not within a "
                "subdirectory of the datadir %s.",
                tablespace,
                datadir,
            )
            is_unsafe_for_physical_backups = True
    ib_logdir = pathinfo.get_innodb_logdir()
    ib_logdir_mp = getmount(ib_logdir)

    if ib_logdir_mp != datadir_mp:
        LOG.error(
            "innodb-log-group-home-dir %s is not on the same filesystem "
            "as the MySQL datadir %s",
            ib_logdir,
            datadir,
        )
        is_unsafe_for_lvm = True
    if ensure_subdir_of_datadir and not is_subdir(ib_logdir, datadir):
        LOG.error(
            "innodb-log-group-home-dir %s is not a subdirectory of "
            "the datadir %s.",
            ib_logdir,
            datadir,
        )
        is_unsafe_for_physical_backups = True

    if is_unsafe_for_lvm:
        raise BackupError("One or more InnoDB file paths are not on the same "
                          "logical volume as the datadir.  This is unsafe for "
                          "LVM snapshot backups.")
    if is_unsafe_for_physical_backups:
        raise BackupError("One or more InnoDB files are not contained within "
                          "the MySQL datadir. A consistent filesystem backup "
                          "is not supported with this configuration in the "
                          "current plugin version.")
Example #16
    def __call__(self, event, snapshot_fsm, snapshot):
        LOG.info("Starting InnoDB recovery")

        mysqld_exe = locate_mysqld_exe(self.mysqld_config)
        LOG.info("Bootstrapping with %s", mysqld_exe)

        mycnf_path = os.path.join(self.mysqld_config['datadir'],
                                  'my.innodb_recovery.cnf')
        self.mysqld_config['log-error'] = 'innodb_recovery.log'
        my_conf = generate_server_config(self.mysqld_config, mycnf_path)

        mysqld = MySQLServer(mysqld_exe, my_conf)
        mysqld.start(bootstrap=True)

        while mysqld.poll() is None:
            if signal.SIGINT in snapshot_fsm.sigmgr.pending:
                mysqld.kill(signal.SIGKILL)
            time.sleep(0.5)
        LOG.info("%s has stopped", mysqld_exe)

        if mysqld.returncode != 0:
            datadir = self.mysqld_config['datadir']
            for line in open(os.path.join(datadir, 'innodb_recovery.log'),
                             'r'):
                LOG.error("%s", line.rstrip())
            raise BackupError("%s exited with non-zero status (%s) during "
                              "InnoDB recovery" %
                              (mysqld_exe, mysqld.returncode))
        else:
            LOG.info("%s ran successfully", mysqld_exe)
Example #17
 def open_mb_logfile(self):
     """Open a file object to the log output for mariabackup"""
     path = join(self.target_directory, 'mariabackup.log')
     try:
         return open(path, 'a')
     except IOError as exc:
         raise BackupError('[%d] %s' % (exc.errno, exc.strerror))
Example #18
 def open_xb_logfile(self):
     """Open a file object to the log output for xtrabackup"""
     path = join(self.target_directory, "xtrabackup.log")
     try:
         return open(path, "a")
     except IOError as exc:
         raise BackupError("[%d] %s" % (exc.errno, exc.strerror))
Example #19
def generate_defaults_file(defaults_file, include=(), auth_opts=None):
    """Generate a mysql options file

    :param defaults_file: path where options should be written
    :param include: ordered list of additional defaults files to include
    :param auth_opts: dictionary of client options.  may include:
                      user, password, host, port, socket
    """
    LOG.info("* Generating mysql option file: %s", defaults_file)
    try:
        fileobj = codecs.open(defaults_file, 'a', encoding='utf8')
        try:
            for path in include:
                path = expanduser(path)
                LOG.info("  + Added !include %s", path)
                print('!include ' + path, file=fileobj)

            if auth_opts:
                need_client_section = True
                for key in ('user', 'password', 'host', 'port', 'socket'):
                    value = auth_opts.get(key)
                    if value is None:
                        continue
                    if need_client_section:
                        LOG.info(
                            "  + Added [client] section with credentials from [mysql:client] section"
                        )
                        print >> fileobj, "[client]"
                        need_client_section = False
                    print >> fileobj, '%s = %s' % (key, value)
        finally:
            fileobj.close()
    except IOError as exc:
        raise BackupError("Failed to create %s: [%d] %s" %
                          (defaults_file, exc.errno, exc.strerror))
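
A minimal usage sketch for generate_defaults_file; every path and credential below is purely illustrative:

generate_defaults_file(
    "/tmp/holland-my.cnf",                      # hypothetical output path
    include=["/root/.my.cnf"],                  # extra defaults files to !include
    auth_opts={"user": "backup",
               "password": "secret",
               "socket": "/var/run/mysqld/mysqld.sock"},
)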
Example #20
    def backup(self):
        """Run a MySQL backup"""
        if self.schema.timestamp is None:
            self._fast_refresh_schema()

        try:
            self.client = connect(self.mysql_config["client"])
        except Exception as ex:
            LOG.debug("%s", ex)
            raise BackupError("Failed connecting to database'")

        if self.dry_run:
            self.mock_env = MockEnvironment()
            self.mock_env.replace_environment()
            LOG.info("Running in dry-run mode.")
            status = self.client.show_databases()
            if not status:
                raise BackupError("Failed to run 'show databases'")
        try:
            if self.config["mysqldump"]["stop-slave"]:
                slave_status = self.client.show_slave_status()
                if not slave_status:
                    raise BackupError("stop-slave enabled, but 'show slave "
                                      "status' failed")
                if slave_status and slave_status["slave_sql_running"] != "Yes":
                    raise BackupError("stop-slave enabled, but replication is "
                                      "not running")
                if not self.dry_run:
                    _stop_slave(self.client, self.config)
            elif self.config["mysqldump"]["bin-log-position"]:
                self.config["mysql:replication"] = {}
                repl_cfg = self.config["mysql:replication"]
                try:
                    master_info = self.client.show_master_status()
                    if master_info:
                        repl_cfg["master_log_file"] = master_info["file"]
                        repl_cfg["master_log_pos"] = master_info["position"]
                except MySQLError as exc:
                    raise BackupError(
                        "Failed to acquire master status [%d] %s" % exc.args)
            self._backup()
        finally:
            if self.config["mysqldump"][
                    "stop-slave"] and "mysql:replication" in self.config:
                _start_slave(self.client, self.config["mysql:replication"])
            if self.mock_env:
                self.mock_env.restore_environment()
Example #21
    def backup(self):
        """
        Use the internal '.dump' functionality built into SQLite to dump the
        pure ASCII SQL Text and write that to disk.
        """

        zopts = (
            self.config["compression"]["method"],
            int(self.config["compression"]["level"]),
            self.config["compression"]["options"],
            self.config["compression"]["inline"],
            self.config["compression"]["split"],
        )
        LOG.info("SQLite binary is [%s]", self.sqlite_bin)
        for database in self.databases:
            path = os.path.abspath(os.path.expanduser(database))

            if database in self.invalid_databases:
                LOG.warning("Skipping invalid SQLite database at [%s]", path)
                continue

            if self.dry_run:
                LOG.info("Backing up SQLite database at [%s] (dry run)", path)
                dest = open("/dev/null", "w")
            else:
                LOG.info("Backing up SQLite database at [%s]", path)
                dest = os.path.join(self.target_directory,
                                    "%s.sql" % os.path.basename(path))
                dest = open_stream(dest, "w", *zopts)

            process = Popen(
                [self.sqlite_bin, path, ".dump"],
                stdin=open("/dev/null", "r"),
                stdout=dest,
                stderr=PIPE,
            )
            _, stderroutput = process.communicate()
            dest.close()

            if process.returncode != 0:
                LOG.error(stderroutput)
                raise BackupError("SQLite '.dump' of [%s] failed" % path)

        # Raise for invalid databases after we successfully backup the others
        if self.invalid_databases:
            raise BackupError("Invalid database(s): %s" %
                              self.invalid_databases)
Example #22
def determine_stream_method(stream):
    """Calculate the stream option from the holland config"""
    stream = stream.lower()
    if stream in ("mbstream", "xbstream"):
        return "xbstream"
    if stream in ("no", "0", "false"):
        return None
    raise BackupError("Invalid mariabackup stream method '%s'" % stream)
Example #23
def determine_stream_method(stream):
    """Calculate the stream option from the holland config"""
    stream = stream.lower()
    if stream in ('mbstream', 'xbstream'):
        return 'xbstream'
    if stream in ('no', '0', 'false'):
        return None
    raise BackupError("Invalid mariabackup stream method '%s'" % stream)
Example #24
 def open_mb_stdout(self):
     """Open the stdout output for a streaming mariabackup run"""
     config = self.config["mariabackup"]
     backup_directory = self.target_directory
     stream = util.determine_stream_method(config["stream"])
     if stream:
         if stream == "tar":
             archive_path = join(backup_directory, "backup.tar")
         elif "stream" in stream:
             archive_path = join(backup_directory, "backup.mb")
         else:
             raise BackupError("Unknown stream method '%s'" % stream)
         try:
             return open_stream(archive_path, "w", **self.config["compression"])
         except OSError as exc:
             raise BackupError("Unable to create output file: %s" % exc)
     else:
         return open("/dev/null", "w")
Example #25
def execute_pre_command(pre_command, **kwargs):
    """Execute a pre-command"""
    if not pre_command:
        return

    pre_command = Template(pre_command).safe_substitute(**kwargs)
    LOG.info("Executing pre-command: %s", pre_command)
    try:
        process = Popen(pre_command, stdout=PIPE, stderr=STDOUT, shell=True, close_fds=True)
    except OSError as exc:
        # missing executable
        raise BackupError("pre-command %s failed: %s" % (pre_command, exc.strerror))

    for line in process.stdout:
        LOG.info("  >> %s", line)
    returncode = process.wait()
    if returncode != 0:
        raise BackupError("pre-command exited with failure status [%d]" % returncode)
Example #26
 def _estimate_legacy_size(self, database):
     try:
         connection = get_connection(self.config, database)
         size = legacy_get_db_size(database, connection)
         connection.close()
         return size
     except dbapi.DatabaseError as exc:
         raise BackupError("Failed to estimate database size for %s: %s" %
                           (database, exc))
Example #27
 def estimate_backup_size(self):
     try:
         client = MySQL.from_defaults(self.defaults_path)
     except MySQL.MySQLError as exc:
         raise BackupError('Failed to connect to MySQL [%d] %s' % exc.args)
     try:
         try:
             datadir = client.var('datadir')
             return directory_size(datadir)
         except MySQL.MySQLError as exc:
             raise BackupError("Failed to find mysql datadir: [%d] %s" %
                               exc.args)
         except OSError as exc:
             raise BackupError(
                 'Failed to calculate directory size: [%d] %s' %
                 (exc.errno, exc.strerror))
     finally:
         client.close()
Example #28
def find_mysqldump(path=None):
    """Find a usable mysqldump binary in path or ENV[PATH]"""
    search_path = ':'.join(path or []) or os.environ.get('PATH', '')
    for _path in search_path.split(':'):
        if os.path.isfile(_path):
            return os.path.realpath(_path)
        if os.path.exists(os.path.join(_path, 'mysqldump')):
            return os.path.realpath(os.path.join(_path, 'mysqldump'))
    raise BackupError("Failed to find mysqldump in %s" % search_path)
Example #29
def determine_stream_method(stream):
    """Calculate the stream option from the holland config"""
    stream = stream.lower()
    if stream in ('yes', '1', 'true', 'tar', 'tar4ibd'):
        return 'tar'
    if stream in ('xbstream', ):
        return 'xbstream'
    if stream in ('no', '0', 'false'):
        return None
    raise BackupError("Invalid xtrabackup stream method '%s'" % stream)
Example #30
def determine_stream_method(stream):
    """Calculate the stream option from the holland config"""
    stream = stream.lower()
    if stream in ("yes", "1", "true", "tar", "tar4ibd"):
        return "tar"
    if stream in ("xbstream", ):
        return "xbstream"
    if stream in ("no", "0", "false"):
        return None
    raise BackupError("Invalid xtrabackup stream method '%s'" % stream)