def add_tag(
        self, backup_type: str, backup_size: Optional[str], backup_status: Optional[str]
    ) -> bool:
        """
        Append a tag line describing a finished backup to backup_tags.txt.
        :param backup_type: The backup type - Full/Inc
        :param backup_size: The size of the backup in human readable format
        :param backup_status: Status: OK or Status: Failed
        :return: True if tagging succeeded or was skipped
        :raise ValueError: if backup_type is not 'Full' or 'Inc'
        """
        # Tagging is optional; do nothing unless a tag was supplied.
        if not self.tag:
            logger.info("TAGGING SKIPPED")
            return True

        # Validate explicitly instead of `assert`, which is stripped under -O.
        if backup_type not in ("Full", "Inc"):
            raise ValueError(
                "add_tag(): backup_type {}: must be 'Full' or 'Inc'".format(backup_type)
            )

        # The tagged backup is the most recent directory of the matching type;
        # pick the directory option key once instead of duplicating the lookup.
        dir_key = "full_dir" if backup_type == "Full" else "inc_dir"
        backup_name = helpers.get_latest_dir_name(
            str(self.builder_obj.backup_options.get(dir_key))
        )

        # One tab-separated record per backup, quoted tag last.
        backup_timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        backup_tag_str = (
            "{bk_name}\t{bk_type}\t{bk_status}\t{bk_timestamp}\t{bk_size}\t'{bk_tag}'\n"
        )

        # Append the record to backup_tags.txt inside backup_dir.
        with open(
            "{}/backup_tags.txt".format(
                self.builder_obj.backup_options.get("backup_dir")
            ),
            "a",
        ) as backup_tags_file:
            backup_tags_file.write(
                backup_tag_str.format(
                    bk_name=backup_name,
                    bk_type=backup_type,
                    bk_status=backup_status,
                    bk_timestamp=backup_timestamp,
                    bk_size=backup_size,
                    bk_tag=self.tag,
                )
            )
        return True
# Example #2
    def prepare_only_full_backup(self) -> Union[None, bool, Exception]:
        """
        Prepare the most recent Full backup.

        When incremental backups exist, the full backup is prepared with
        apply_log_only=True so the incrementals can still be applied later;
        otherwise it is untarred/extracted and fully prepared.
        :return: True (also when there is no full backup to prepare).
        """
        full_dir = str(self.prepare_options.backup_options.get("full_dir"))
        inc_dir = str(self.prepare_options.backup_options.get("inc_dir"))
        recent_bck = helpers.get_latest_dir_name(full_dir)
        backup_builder = BackupBuilderChecker(self.conf, dry_run=self.dry)

        if recent_bck:
            if os.listdir(inc_dir):
                logger.info(
                    "- - - - Preparing Full backup for incrementals - - - -")
                logger.info(
                    "- - - - Final prepare,will occur after preparing all inc backups - - - -"
                )
                time.sleep(3)
                apply_log_only = True
            else:
                logger.info("- - - - Preparing Full Backup - - - -")
                self.prepare_options.untar_backup(recent_bck=recent_bck)
                # Extracting/decrypting from streamed backup and extra checks goes here
                backup_builder.extract_decrypt_from_stream_backup(
                    recent_full_bck=recent_bck)
                apply_log_only = None

            # Build and run the prepare command for the full backup.
            backup_prepare_cmd = self.prepare_options.prepare_command_builder(
                full_backup=recent_bck, apply_log_only=apply_log_only)
            self.run_prepare_command(full_dir, recent_bck, backup_prepare_cmd)
        return True
    def test_get_latest_dir_name(self):
        """get_latest_dir_name() must return the lexicographically newest dir name."""
        # Build the fixture under this test file's own directory so the test
        # does not depend on the current working directory (the old code
        # created dirs under relative "tests/" but asserted against __file__).
        base_dir = os.path.join(os.path.dirname(__file__), "DELETE_ME")
        # NOTE: mode must be octal 0o777; the previous decimal 777 (== 0o1411)
        # silently set bogus permissions.
        os.makedirs(base_dir, mode=0o777, exist_ok=True)
        os.makedirs(
            os.path.join(base_dir, "2021-05-06_11-48-31"), mode=0o777, exist_ok=True
        )
        os.makedirs(
            os.path.join(base_dir, "2021-05-06_11-47-31"), mode=0o777, exist_ok=True
        )

        assert (
            helpers.get_latest_dir_name(path=base_dir)
            == "2021-05-06_11-48-31"
        )
# Example #4
    def prepare_inc_full_backups(self) -> Union[None, bool, Exception]:
        """
        Prepare the latest Full backup and then replay every Incremental backup.

        All incrementals except the newest one are prepared with
        apply_log_only=True; the newest one is extracted/decrypted and prepared
        without it.
        :return: True when the prepare stage completes; result of
                 prepare_only_full_backup() when no incrementals exist.
        """
        backup_builder = BackupBuilderChecker(self.conf, dry_run=self.dry)
        inc_dir = str(self.prepare_options.backup_options.get("inc_dir"))
        if not os.listdir(inc_dir):
            logger.info(
                "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -"
            )
            return self.prepare_only_full_backup()

        logger.info("- - - - You have Incremental backups. - - - -")
        recent_bck = helpers.get_latest_dir_name(
            str(self.prepare_options.backup_options.get("full_dir")))

        if self.prepare_only_full_backup():
            logger.info("Preparing Incs: ")
            list_of_dir = sorted(os.listdir(inc_dir))
            # The newest incremental is the last element of the sorted list;
            # hoisted out of the loop instead of recomputing max(os.listdir())
            # on every iteration.
            last_inc_backup = list_of_dir[-1]
            for inc_backup_dir in list_of_dir:
                apply_log_only = None
                if inc_backup_dir != last_inc_backup:
                    logger.info(
                        "Preparing Incremental backups in sequence. Incremental backup dir/name is {}"
                        .format(inc_backup_dir))
                    apply_log_only = True
                else:
                    logger.info(
                        "Preparing last Incremental backup, inc backup dir/name is {}"
                        .format(inc_backup_dir))

                    # Extracting/decrypting from streamed backup and extra checks goes here
                    backup_builder.extract_decrypt_from_stream_backup(
                        recent_inc_bck=inc_backup_dir, flag=True)
                # Prepare command
                backup_prepare_cmd = self.prepare_options.prepare_command_builder(
                    full_backup=recent_bck,
                    incremental=inc_backup_dir,
                    apply_log_only=apply_log_only,
                )

                self.run_prepare_command(
                    inc_dir,
                    inc_backup_dir,
                    backup_prepare_cmd,
                )

        logger.info("- - - - The end of the Prepare Stage. - - - -")
        return True
 def run_xtra_copyback(self,
                       data_dir: Optional[str] = None) -> Optional[bool]:
     """
     Run xtrabackup with --copy-back to restore the latest prepared Full backup.
     :param data_dir: optional override for the MySQL datadir; defaults to
                      mysql_options' "data_dir".
     :return: result of ProcessRunner.run_command().
     """
     # xtrabackup's restore-target option is spelled --datadir; the previous
     # --data_dir was not a recognized xtrabackup option.
     copy_back = "{} --copy-back {} --target-dir={}/{} --datadir={}".format(
         self.backup_options.get("backup_tool"),
         self.backup_options.get("xtra_options"),
         self.backup_options.get("full_dir"),
         helpers.get_latest_dir_name(
             str(self.backup_options.get("full_dir"))),
         self.mysql_options.get("data_dir")
         if data_dir is None else data_dir,
     )
     return ProcessRunner.run_command(copy_back)
    def last_full_backup_date(
        self, path: Optional[str] = None, full_backup_interval: Optional[float] = None
    ) -> bool:
        """
        Check if last full backup date retired or not.
        :param path: optional full-backup directory; defaults to the configured full_dir.
        :param full_backup_interval: optional interval in seconds; defaults to config.
        :return: True if last full backup date older than given interval, False if it is newer.
        """
        # The newest full-backup directory is named with its creation timestamp,
        # so its name doubles as the backup's creation time.
        base_dir = path if path else str(self.builder_obj.backup_options.get("full_dir"))
        if full_backup_interval:
            interval = full_backup_interval
        else:
            interval = str(self.builder_obj.backup_options.get("full_backup_interval"))
        newest_dir = helpers.get_latest_dir_name(base_dir)

        taken_at = datetime.strptime(str(newest_dir), "%Y-%m-%d_%H-%M-%S")
        elapsed = (datetime.now() - taken_at).total_seconds()
        return float(elapsed) >= float(interval)
 def copy_back_action(self,
                      options: Optional[str] = None) -> Optional[bool]:
     """
     Function for complete recover/copy-back actions
     :return: True if succeeded. Error if failed.
     """
     try:
         full_dir = str(self.backup_options.get("full_dir"))
         # Refuse to restore from a backup that was never prepared.
         self.check_if_backup_prepared(
             full_dir,
             helpers.get_latest_dir_name(full_dir),
         )
         self.shutdown_mysql()
         restored = self.move_data_dir() and self.copy(options=options)
         if restored:
             logger.info("All data copied back successfully. ")
             logger.info("Your MySQL server is UP again")
             return True
     except Exception as err:
         # Any failure is logged with its exception type; caller sees None.
         logger.error("{}: {}".format(type(err).__name__, err))
     return None
# Example #8
    def prepare_with_tags(self) -> Optional[bool]:
        """
        Prepare the backup selected by self.tag (looked up via parse_backup_tags).

        A 'Full' tag prepares the latest full backup; an 'Inc' tag prepares the
        full backup first, then replays incrementals in order up to and
        including the tagged one.
        :return: True when the prepare stage completes.
        """
        # found_backups is indexed as [0] -> backup dir name, [1] -> backup type
        # ('Full'/'Inc'); presumably None when the tag is not found — indexing
        # would then raise TypeError. TODO confirm against parse_backup_tags.
        found_backups = BackupPrepareBuilderChecker.parse_backup_tags(
            backup_dir=str(
                self.prepare_options.backup_options.get("backup_dir")),
            tag_name=self.tag,
        )
        recent_bck = helpers.get_latest_dir_name(
            str(self.prepare_options.backup_options.get("full_dir")))
        # Not initialized in the Prepare class constructor as I think there is no need.
        backup_builder = BackupBuilderChecker(self.conf, dry_run=self.dry)

        if found_backups[1] == "Full":  # type: ignore
            if recent_bck:
                logger.info("- - - - Preparing Full Backup - - - -")

                # Extracting/decrypting from streamed backup and extra checks goes here.
                backup_builder.extract_decrypt_from_stream_backup(
                    recent_full_bck=recent_bck)

                # Prepare command
                backup_prepare_cmd = self.prepare_options.prepare_command_builder(
                    full_backup=recent_bck)

                self.run_prepare_command(
                    str(self.prepare_options.backup_options.get("full_dir")),
                    recent_bck,
                    backup_prepare_cmd,
                )

        elif found_backups[1] == "Inc":  # type: ignore
            if not os.listdir(
                    str(self.prepare_options.backup_options.get("inc_dir"))):
                # No incrementals on disk: fall back to preparing just the full backup.
                logger.info(
                    "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -"
                )
                self.prepare_only_full_backup()
            else:
                logger.info("- - - - You have Incremental backups. - - - -")
                if self.prepare_only_full_backup():
                    logger.info("Preparing Incs: ")
                    list_of_dir = helpers.sorted_ls(
                        str(self.prepare_options.backup_options.get(
                            "inc_dir")))
                    # Find the index number inside all list for backup(which was found via tag)
                    index_num = list_of_dir.index(
                        found_backups[0])  # type: ignore
                    # Limit the iteration until this found backup
                    for dir_ in list_of_dir[:index_num + 1]:
                        apply_log_only = None
                        # Every incremental before the tagged one is applied
                        # with apply_log_only=True; the tagged one is not.
                        if dir_ != found_backups[0]:  # type: ignore
                            logger.info(
                                "Preparing inc backups in sequence. inc backup dir/name is {}"
                                .format(dir_))
                            apply_log_only = True

                        else:
                            logger.info(
                                "Preparing last incremental backup, inc backup dir/name is {}"
                                .format(dir_))

                            # Extracting/decrypting from streamed backup and extra checks goes here
                            backup_builder.extract_decrypt_from_stream_backup(
                                recent_inc_bck=dir_, flag=True)

                        # Prepare command
                        backup_prepare_cmd = (
                            self.prepare_options.prepare_command_builder(
                                full_backup=recent_bck,
                                incremental=dir_,
                                apply_log_only=apply_log_only,
                            ))

                        self.run_prepare_command(
                            str(
                                self.prepare_options.backup_options.get(
                                    "inc_dir")),
                            dir_,
                            backup_prepare_cmd,
                        )

        logger.info("- - - - The end of the Prepare Stage. - - - -")
        return True
    def all_backup(self) -> bool:
        """
        This method at first checks full backup directory, if it is empty takes full backup.
        If it is not empty then checks for full backup time.
        If the recent full backup is older than full_backup_interval, it takes a new full backup.
        In any other conditions it takes incremental backup.
        :return: True after the chosen backup action has been dispatched.
        """
        # Workaround for circular import dependency error in Python

        # Creating object from CheckEnv class
        check_env_obj = CheckEnv(
            self.conf,
            full_dir=str(self.builder_obj.backup_options.get("full_dir")),
            inc_dir=str(self.builder_obj.backup_options.get("inc_dir")),
        )

        # NOTE(review): assert is stripped under `python -O`; consider raising instead.
        assert check_env_obj.check_all_env() is True, "environment checks failed!"
        if not helpers.get_latest_dir_name(
            str(self.builder_obj.backup_options.get("full_dir"))
        ):
            # No full backup exists yet: take the very first one.
            logger.info(
                "- - - - You have no backups : Taking very first Full Backup! - - - -"
            )

            # "flush logs" must succeed before the backup runs (short-circuit `and`).
            if self.mysql_cli.mysql_run_command("flush logs") and self.full_backup():
                # Removing old inc backups
                self.clean_inc_backup_dir()

        elif self.last_full_backup_date():
            # Newest full backup is older than full_backup_interval: rotate it.
            logger.info(
                "- - - - Your full backup is timeout : Taking new Full Backup! - - - -"
            )

            # Archiving backups
            if self.archive_obj.backup_archive_options.get("archive_dir"):
                logger.info(
                    "Archiving enabled; cleaning archive_dir & archiving previous Full Backup"
                )
                # Old archives are purged only when a retention limit is configured.
                if self.archive_obj.backup_archive_options.get(
                    "archive_max_duration"
                ) or self.archive_obj.backup_archive_options.get("archive_max_size"):
                    self.archive_obj.clean_old_archives()
                self.archive_obj.create_backup_archives()
            else:
                logger.info("Archiving disabled. Skipping!")

            if self.mysql_cli.mysql_run_command("flush logs") and self.full_backup():
                # Removing full backups
                self.clean_full_backup_dir()

                # Removing inc backups
                self.clean_inc_backup_dir()

        else:

            # Recent full backup is fresh enough: take an incremental instead.
            logger.info(
                "- - - - You have a full backup that is less than {} seconds old. - - - -".format(
                    self.builder_obj.backup_options.get("full_backup_interval")
                )
            )
            logger.info(
                "- - - - We will take an incremental one based on recent Full Backup - - - -"
            )

            time.sleep(3)

            # Taking incremental backup
            self.inc_backup()

        return True
    def inc_backup(self) -> bool:
        """
        Method for taking incremental backups.
        :return: True on success (always True in dry-run mode).
        :raise: RuntimeError when no Full backup exists to base the incremental on.
        """
        backup_options = self.builder_obj.backup_options

        # An incremental backup is meaningless without a Full backup to base it on.
        recent_full_bck = helpers.get_latest_dir_name(
            str(backup_options.get("full_dir"))
        )
        if not recent_full_bck:
            raise RuntimeError(
                "Failed to get Full backup path. Are you sure you have one?"
            )

        # The previous incremental (if any) becomes the delta base.
        recent_inc_bck = helpers.get_latest_dir_name(
            str(backup_options.get("inc_dir"))
        )

        # New time-stamped directory to hold this incremental.
        inc_backup_dir = helpers.create_backup_directory(
            str(backup_options.get("inc_dir"))
        )

        # Streaming with tar cannot produce incremental backups;
        # this check raises RuntimeError in that configuration.
        self.builder_obj.stream_tar_incremental_checker()

        xtrabackup_inc_cmd = self.builder_obj.inc_backup_command_builder(
            recent_full_bck=recent_full_bck,
            inc_backup_dir=inc_backup_dir,
            recent_inc_bck=recent_inc_bck,
        )

        self.builder_obj.extract_decrypt_from_stream_backup(
            recent_full_bck=recent_full_bck, recent_inc_bck=recent_inc_bck
        )

        # Deprecated workaround for LP #1444255
        self.builder_obj.decrypter(
            recent_full_bck=recent_full_bck,
            xtrabackup_inc_cmd=xtrabackup_inc_cmd,
            recent_inc_bck=recent_inc_bck,
        )

        # Dry run: everything is built but nothing is executed or tagged.
        if self.dry:
            return True

        logger.debug(
            "Starting {}".format(backup_options.get("backup_tool"))
        )
        status = ProcessRunner.run_command(xtrabackup_inc_cmd)
        self.add_tag(
            backup_type="Inc",
            backup_size=helpers.get_folder_size(inc_backup_dir),
            backup_status="OK" if status is True else "FAILED",
        )
        return status