def move_data_dir(self) -> bool:
     # Move data_dir to new directory
     tmp_dir = self.backup_options.get("tmp_dir")
     logger.info("Moving MySQL data_dir to {}".format(tmp_dir))
     if os.path.isdir(str(tmp_dir)):
         rmdir_ = "rm -rf {}".format(tmp_dir)
         ProcessRunner.run_command(rmdir_)
     self.move_to_tmp_dir()
     self.create_empty_data_dir()
     return True
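These methods read their settings from dict-like option mappings loaded from the tool's config file. A minimal, purely illustrative backup_options sketch (the keys match the ones used in these snippets; the values are assumptions, not project defaults):

    # Hypothetical option mapping, for illustration only; real values come from the config file.
    backup_options = {
        "tmp_dir": "/var/lib/mysql_tmp_bck",  # where the old data_dir is parked before restore
        "full_dir": "/var/backups/full",      # base directory holding timestamped full backups
        "inc_dir": "/var/backups/inc",        # base directory holding timestamped incremental backups
        "backup_tool": "xtrabackup",          # path to the xtrabackup binary
    }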
Example #2
    def extract_decrypt_from_stream_backup(
        self,
        recent_full_bck: Optional[str] = None,
        recent_inc_bck: Optional[str] = None,
        flag: Optional[bool] = None,
    ) -> None:
        """
        Method for extracting and, if necessary, decrypting a streamed backup.
        If recent_full_bck is passed, the full backup is extracted.
        If recent_inc_bck is passed, the incremental backup is extracted.
        """
        # Extract and decrypt streamed full backup prior to executing incremental backup
        file_name = "{}/{}/inc_backup.stream".format(
            self.backup_options.get("inc_dir"), recent_inc_bck)
        file_place_holder = "< {} -C {}/{}".format(
            file_name, self.backup_options.get("inc_dir"), recent_inc_bck)

        if not recent_inc_bck:
            file_name = "{}/{}/full_backup.stream".format(
                self.backup_options.get("full_dir"), recent_full_bck)
            file_place_holder = "< {} -C {}/{}".format(
                file_name, self.backup_options.get("full_dir"),
                recent_full_bck)

        xbstream_command = None

        if self.xbstream_options.get("stream") == "xbstream":
            xbstream_command = "{} {}".format(
                self.xbstream_options.get("xbstream"),
                self.xbstream_options.get("xbstream_options"),
            )
            if (self.encryption_options.get("encrypt")
                    and self.xbstream_options.get("xbs_decrypt") and not flag):
                logger.info(
                    "Using xbstream to extract and decrypt from {}".format(
                        file_name))
                xbstream_command += (
                    " --decrypt={} --encrypt-key={} --encrypt-threads={} ".
                    format(
                        self.encryption_options.get("decrypt"),
                        self.encryption_options.get("encrypt_key"),
                        self.encryption_options.get("encrypt_threads"),
                    ))

        if xbstream_command:
            xbstream_command += file_place_holder
            logger.info(
                "The following xbstream command will be executed {}".format(
                    xbstream_command))
            if self.dry == 0 and isfile(file_name):
                ProcessRunner.run_command(xbstream_command)
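For illustration, a standalone sketch of how the final command might render, assuming hypothetical paths, cipher, key and xbstream options (none of these values come from the project):

    # Illustrative values only; mirrors the string assembly above.
    xbstream_command = "xbstream -x --parallel=4"
    xbstream_command += " --decrypt=AES256 --encrypt-key=EXAMPLE_KEY --encrypt-threads=4 "
    xbstream_command += ("< /var/backups/inc/2023-11-06_12-00-00/inc_backup.stream"
                         " -C /var/backups/inc/2023-11-06_12-00-00")
    print(xbstream_command)  # the real code hands this string to ProcessRunner.run_command()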
Example #3
    def decrypter(
        self,
        recent_full_bck: Optional[str] = None,
        xtrabackup_inc_cmd: Optional[str] = None,
        recent_inc_bck: Optional[str] = None,
    ) -> None:
        logger.info("Applying workaround for LP #1444255")
        logger.info("See more -> https://jira.percona.com/browse/PXB-934")
        # With recent PXB 8 there seems to be no need for this workaround.
        # Because of that, this feature was moved into this method and kept just in case.
        # Deprecated as hell.
        if not xtrabackup_inc_cmd or "encrypt" not in xtrabackup_inc_cmd:
            return
        if not isfile("{}/{}/xtrabackup_checkpoints.xbcrypt".format(
                self.backup_options.get("full_dir"), recent_full_bck)):
            logger.info("Skipping...")
            return

        xbcrypt_command = "{} -d -k {} -a {}".format(
            self.encryption_options.get("xbcrypt"),
            self.encryption_options.get("encrypt_key"),
            self.encryption_options.get("encrypt"),
        )
        xbcrypt_command_extra = (
            " -i {}/{}/xtrabackup_checkpoints.xbcrypt -o {}/{}/xtrabackup_checkpoints"
        )
        xbcrypt_command += xbcrypt_command_extra.format(
            self.backup_options.get("full_dir"),
            recent_full_bck,
            self.backup_options.get("full_dir"),
            recent_full_bck,
        )

        if recent_inc_bck:
            if not isfile("{}/{}/xtrabackup_checkpoints.xbcrypt".format(
                    self.backup_options.get("inc_dir"), recent_inc_bck)):
                logger.info("Skipping...")
                return
            xbcrypt_command += xbcrypt_command_extra.format(
                self.backup_options.get("inc_dir"),
                recent_inc_bck,
                self.backup_options.get("inc_dir"),
                recent_inc_bck,
            )
        logger.info("The following xbcrypt command will be executed {}".format(
            xbcrypt_command))
        if self.dry == 0:
            ProcessRunner.run_command(xbcrypt_command)
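A standalone sketch of the xbcrypt invocation this builds for a full backup, with made-up key, cipher and paths:

    # Illustrative values only; -d decrypts, -k is the key, -a the algorithm.
    xbcrypt_command = "xbcrypt -d -k EXAMPLE_KEY -a AES256"
    xbcrypt_command += (" -i /var/backups/full/2023-11-06_12-00-00/xtrabackup_checkpoints.xbcrypt"
                        " -o /var/backups/full/2023-11-06_12-00-00/xtrabackup_checkpoints")
    print(xbcrypt_command)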
Example #4
 def decrypt_backup(self, path: Optional[str],
                    dir_name: Optional[str]) -> Optional[bool]:
     """
     Method for decrypting backups.
     If encrypted backups are used, they must be decrypted prior to preparing.
     :param path: the base directory path, i.e. the full backup dir or the incremental dir.
     :param dir_name: the exact backup folder name (likely a timestamped folder name).
     :return: True on success, None if decryption is disabled or this is a dry run; raises RuntimeError on error.
     """
     if self.encryption_options.get("decrypt"):
         # The base decryption command
         decr_cmd = "{} --decrypt={} --encrypt-key={} --target-dir={}/{}".format(
             self.backup_options.get("backup_tool"),
             self.encryption_options.get("decrypt"),
             self.encryption_options.get("encrypt_key"),
             path,
             dir_name,
         )
         if self.encryption_options.get("remove_original_comp"):
             decr_cmd += " --remove-original"
         logger.info("Trying to decrypt backup")
         logger.info("Running decrypt command -> {}".format(decr_cmd))
         if self.dry:
             return None
         return ProcessRunner.run_command(decr_cmd)
     return None
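A standalone sketch of the decryption command decrypt_backup builds, using made-up values for the tool path, cipher, key and directories:

    # Illustrative values only.
    decr_cmd = "{} --decrypt={} --encrypt-key={} --target-dir={}/{}".format(
        "xtrabackup", "AES256", "EXAMPLE_KEY", "/var/backups/full", "2023-11-06_12-00-00")
    decr_cmd += " --remove-original"  # appended only when remove_original_comp is set
    print(decr_cmd)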
Example #5
    def decompress_backup(self, path: Optional[str],
                          dir_name: Optional[str]) -> Optional[bool]:
        """
        Method for backup decompression.
        Check whether decompression is enabled; if it is, decompress the
        backup prior to preparing it.
        :param path: the base directory path, i.e. the full backup dir or the incremental dir.
        :param dir_name: the exact backup folder name (likely a timestamped folder name).
        :return: True on success, None if decompression is disabled or this is a dry run; raises RuntimeError on error.
        """
        if self.compression_options.get("decompress"):
            # The base decompression command
            dec_cmd = "{} --decompress={} --target-dir={}/{}".format(
                self.backup_options.get("backup_tool"),
                self.compression_options.get("decompress"),
                path,
                dir_name,
            )
            if self.compression_options.get("remove_original_comp"):
                dec_cmd += " --remove-original"

            logger.info("Trying to decompress backup")
            logger.info("Running decompress command -> {}".format(dec_cmd))
            if self.dry:
                return None
            return ProcessRunner.run_command(dec_cmd)
        return None
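And the corresponding decompression sketch; here the --decompress value is simply whatever string the config stores under decompress (all values below are made up):

    # Illustrative values only.
    dec_cmd = "{} --decompress={} --target-dir={}/{}".format(
        "xtrabackup", "TRUE", "/var/backups/full", "2023-11-06_12-00-00")
    print(dec_cmd)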
 def giving_chown(self, data_dir: Optional[str] = None) -> Optional[bool]:
     # Changing owner of data_dir to given user:group
     give_chown = "{} {}".format(
         self.command_options.get("chown_command"),
         self.mysql_options.get("data_dir")
         if data_dir is None else data_dir,
     )
     return ProcessRunner.run_command(give_chown)
 def start_mysql_func(
         self,
         start_tool: Optional[str] = None,
         options: Optional[str] = None) -> Union[None, bool, Exception]:
     # Starting MySQL
     logger.info("Starting MySQL server: ")
     args = (self.command_options.get("start_mysql_command")
             if start_tool is None else start_tool)
     start_command = "{} {}".format(
         args, options) if options is not None else args
     return ProcessRunner.run_command(start_command)
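Hypothetical usage of start_mysql_func; the commands shown are assumptions, not project defaults:

    # obj.start_mysql_func()                        # falls back to command_options["start_mysql_command"]
    # obj.start_mysql_func(start_tool="mysqld_safe",
    #                      options="--defaults-file=/etc/my.cnf")  # -> "mysqld_safe --defaults-file=/etc/my.cnf"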
Example #8
 def untar_backup(self, recent_bck: str) -> Optional[bool]:
     if self.xbstream_options.get("stream") == "tar":
         full_dir = self.backup_options.get("full_dir")
         untar_cmd = "tar -xf {}/{}/full_backup.tar -C {}/{}".format(
             full_dir, recent_bck, full_dir, recent_bck)
         logger.info(
             "The following tar command will be executed -> {}".format(
                 untar_cmd))
         if self.dry == 0 and os.path.isfile("{}/{}/full_backup.tar".format(
                 full_dir, recent_bck)):
             return ProcessRunner.run_command(untar_cmd)
     return None
 def run_xtra_copyback(self,
                       data_dir: Optional[str] = None) -> Optional[bool]:
     # Running Xtrabackup with --copy-back option
      copy_back = "{} --copy-back {} --target-dir={}/{} --datadir={}".format(
         self.backup_options.get("backup_tool"),
         self.backup_options.get("xtra_options"),
         self.backup_options.get("full_dir"),
         helpers.get_latest_dir_name(
             str(self.backup_options.get("full_dir"))),
         self.mysql_options.get("data_dir")
         if data_dir is None else data_dir,
     )
     return ProcessRunner.run_command(copy_back)
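A standalone sketch of the copy-back command with made-up paths; --datadir points at the live MySQL data directory being restored into, and "--no-version-check" stands in for whatever xtra_options the config holds:

    # Illustrative values only.
    copy_back = "{} --copy-back {} --target-dir={}/{} --datadir={}".format(
        "xtrabackup", "--no-version-check", "/var/backups/full",
        "2023-11-06_12-00-00", "/var/lib/mysql")
    print(copy_back)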
Example #10
    def run_prepare_command(self, base_dir: Optional[str],
                            actual_dir: Optional[str],
                            cmd: Optional[str]) -> Optional[bool]:
        # Decrypt backup
        self.prepare_options.decrypt_backup(base_dir, actual_dir)

        # Decompress backup
        self.prepare_options.decompress_backup(base_dir, actual_dir)

        logger.info("Running prepare command -> {}".format(cmd))
        if self.dry:
            return True
        return ProcessRunner.run_command(cmd)
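The order matters here: the backup is decrypted first, then decompressed, and only then prepared. A hypothetical call (the prepare command shown is an illustrative xtrabackup invocation that would be built elsewhere, e.g. by a command builder):

    # obj.run_prepare_command(
    #     base_dir="/var/backups/full",
    #     actual_dir="2023-11-06_12-00-00",
    #     cmd="xtrabackup --prepare --apply-log-only "
    #         "--target-dir=/var/backups/full/2023-11-06_12-00-00",
    # )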
    def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]:
        """
        Method for checking whether the MySQL server is up or not.
        :param options: options used to connect to the MySQL server; if None, they are read from the config file.
        :return: True on success, raises RuntimeError on error.
        """
        if not options:

            status_args = (
                "{} --defaults-file={} "
                "--user={} --password='{}' status".format(
                    self.mysql_options.get("mysqladmin"),
                    self.mysql_options.get("mycnf"),
                    self.mysql_options.get("mysql_user"),
                    self.mysql_options.get("mysql_password"),
                )
            )

            if self.mysql_options.get("mysql_socket"):
                status_args += " --socket={}".format(
                    self.mysql_options.get("mysql_socket")
                )
            elif self.mysql_options.get("mysql_host") and self.mysql_options.get(
                "mysql_port"
            ):
                status_args += " --host={}".format(self.mysql_options.get("mysql_host"))
                status_args += " --port={}".format(self.mysql_options.get("mysql_port"))
            else:
                logger.critical(
                    "Neither mysql_socket nor mysql_host and mysql_port are defined in config!"
                )
                raise RuntimeError(
                    "Neither mysql_socket nor mysql_host and mysql_port are defined in config!"
                )
        else:
            status_args = "{} {} status".format(
                self.mysql_options.get("mysqladmin"), options
            )

        # filter out password from argument list
        filtered_args = re.sub(r"--password='[^']*'", "--password='*****'", status_args)

        logger.info("Running mysqladmin command -> {}".format(filtered_args))

        return ProcessRunner.run_command(status_args)
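For illustration, a standalone sketch of the built command and its password-filtered form as it would appear in the log (credentials and paths are made up):

    import re

    status_args = ("mysqladmin --defaults-file=/etc/my.cnf --user=backup_user "
                   "--password='s3cr3t' status --socket=/var/run/mysqld/mysqld.sock")
    filtered_args = re.sub(r"--password='[^']*'", "--password='*****'", status_args)
    print(filtered_args)
    # mysqladmin --defaults-file=/etc/my.cnf --user=backup_user --password='*****' status --socket=/var/run/mysqld/mysqld.sock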
    def full_backup(self) -> bool:
        """
        Method for taking full backups. It constructs the backup command based on the config file.
        :return: True on success.
        :raise:  RuntimeError on error.
        """
        logger.info(
            "starting full backup to {}".format(
                self.builder_obj.backup_options.get("full_dir")
            )
        )
        full_backup_dir = helpers.create_backup_directory(
            str(self.builder_obj.backup_options.get("full_dir"))
        )

        # Creating Full Backup command.
        xtrabackup_cmd = self.builder_obj.full_backup_command_builder(
            full_backup_dir=full_backup_dir
        )

        # Extra checks.
        self.builder_obj.stream_encrypt_compress_tar_checker()

        if self.dry:
            # If it's a dry run, skip running & tagging
            return True

        logger.debug(
            "Starting {}".format(self.builder_obj.backup_options.get("backup_tool"))
        )
        status = ProcessRunner.run_command(xtrabackup_cmd)
        status_str = "OK" if status is True else "FAILED"
        self.add_tag(
            backup_type="Full",
            backup_size=helpers.get_folder_size(full_backup_dir),
            backup_status=status_str,
        )
        return status
 def shutdown_mysql(self) -> Union[None, bool, Exception]:
     # Shut Down MySQL
     logger.info("Shutting Down MySQL server:")
     args = self.command_options.get("stop_mysql_command")
     return ProcessRunner.run_command(args)
    def create_backup_archives(self) -> bool:
        from mysql_autoxtrabackup.backup_prepare.prepare import Prepare

        # Creating .tar.gz archive files of taken backups
        file_list = os.listdir(str(self.backup_options.get("full_dir")))
        for i in file_list:
            if len(file_list) == 1 or i != max(file_list):
                logger.info("Preparing backups prior archiving them...")

                if self.backup_archive_options.get("prepare_archive"):
                    logger.info("Started to prepare backups, prior archiving!")
                    prepare_obj = Prepare(
                        config=self.conf, dry_run=self.dry, tag=self.tag
                    )
                    status = prepare_obj.prepare_inc_full_backups()
                    if status:
                        logger.info(
                            "Backups Prepared successfully... {}".format(status)
                        )

                if self.backup_archive_options.get("move_archive") and (
                    int(str(self.backup_archive_options.get("move_archive"))) == 1
                ):
                    dir_name = (
                        str(self.backup_archive_options.get("archive_dir"))
                        + "/"
                        + i
                        + "_archive"
                    )
                    logger.info(
                        "move_archive enabled. Moving {} to {}".format(
                            self.backup_options.get("backup_dir"), dir_name
                        )
                    )
                    try:
                        shutil.copytree(
                            str(self.backup_options.get("backup_dir")), dir_name
                        )
                    except Exception as err:
                        logger.error("FAILED: Move Archive")
                        logger.error(err)
                        raise
                    else:
                        return True
                else:
                    logger.info(
                        "move_archive is disabled. archiving / compressing current_backup."
                    )
                    # Multi-core tar utilizing pigz.
                    # Pigz defaults to the number of available cores, or 8 if that cannot be determined.
                    # Test whether pigz is available.
                    logger.info("testing for pigz...")
                    status = ProcessRunner.run_command("pigz --version")
                    archive_file = (
                        str(self.backup_archive_options.get("archive_dir"))
                        + "/"
                        + i
                        + ".tar.gz"
                    )
                    if status:
                        logger.info("Found pigz...")
                        # run_tar = "tar cvvf - {} {} | pigz -v > {}" \
                        run_tar = (
                            "tar --use-compress-program=pigz -cvf {} {} {}".format(
                                archive_file,
                                self.backup_options.get("full_dir"),
                                self.backup_options.get("inc_dir"),
                            )
                        )
                    else:
                        # handle file not found error.
                        logger.warning(
                            "pigz executable is not available. Defaulting to single-core tar"
                        )
                        run_tar = "tar -zcf {} {} {}".format(
                            archive_file,
                            self.backup_options.get("full_dir"),
                            self.backup_options.get("inc_dir"),
                        )
                    status = ProcessRunner.run_command(run_tar)
                    if status:
                        logger.info(
                            "OK: Old full backup and incremental backups archived!"
                        )
                        return True

                    logger.error("FAILED: Archiving ")
                    raise RuntimeError("FAILED: Archiving -> {}".format(run_tar))
        return True
Example #15
 def mysql_run_command(self, statement: str) -> bool:
     command = self.create_mysql_client_command(statement=statement)
     return ProcessRunner.run_command(command)
    def inc_backup(self) -> bool:
        """
        Method for taking incremental backups.
        :return: True on success.
        :raise: RuntimeError on error.
        """
        # Get the recent full backup path
        recent_full_bck = helpers.get_latest_dir_name(
            str(self.builder_obj.backup_options.get("full_dir"))
        )
        if not recent_full_bck:
            raise RuntimeError(
                "Failed to get Full backup path. Are you sure you have one?"
            )

        # Get the recent incremental backup path
        recent_inc_bck = helpers.get_latest_dir_name(
            str(self.builder_obj.backup_options.get("inc_dir"))
        )

        # Creating time-stamped incremental backup directory
        inc_backup_dir = helpers.create_backup_directory(
            str(self.builder_obj.backup_options.get("inc_dir"))
        )

        # Check here if stream=tar enabled.
        # Because it is impossible to take incremental backup with streaming tar.
        # raise RuntimeError.
        self.builder_obj.stream_tar_incremental_checker()

        xtrabackup_inc_cmd = self.builder_obj.inc_backup_command_builder(
            recent_full_bck=recent_full_bck,
            inc_backup_dir=inc_backup_dir,
            recent_inc_bck=recent_inc_bck,
        )

        self.builder_obj.extract_decrypt_from_stream_backup(
            recent_full_bck=recent_full_bck, recent_inc_bck=recent_inc_bck
        )

        # Deprecated workaround for LP #1444255
        self.builder_obj.decrypter(
            recent_full_bck=recent_full_bck,
            xtrabackup_inc_cmd=xtrabackup_inc_cmd,
            recent_inc_bck=recent_inc_bck,
        )

        if self.dry:
            # If it's a dry run, skip running & tagging
            return True

        logger.debug(
            "Starting {}".format(self.builder_obj.backup_options.get("backup_tool"))
        )
        status = ProcessRunner.run_command(xtrabackup_inc_cmd)
        status_str = "OK" if status is True else "FAILED"
        self.add_tag(
            backup_type="Inc",
            backup_size=helpers.get_folder_size(inc_backup_dir),
            backup_status=status_str,
        )
        return status
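Taken together, a typical run takes a full backup when none exists yet and an incremental one otherwise. A hypothetical driver sketch (the backup object and its wiring are assumptions; only the method names come from these examples):

    # if helpers.get_latest_dir_name(str(backup_options.get("full_dir"))) is None:
    #     backup.full_backup()   # no full backup yet, so take one first
    # else:
    #     backup.inc_backup()    # otherwise take an incremental against the latest full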
 def create_empty_data_dir(self) -> Union[None, bool, Exception]:
     logger.info("Creating an empty data directory ...")
     makedir = "mkdir {}".format(self.mysql_options.get("data_dir"))
     return ProcessRunner.run_command(makedir)