Example #1
 def __init__(self, log_file, output_file, data_dir, uncompressed_archives):
     self.log_manager = log_manager.LogManager()
     self.data_dir = data_dir
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     self.command_executor = CommandExecutor(output_file)
     self.compressed_archives = not uncompressed_archives
Example #2
 def __init__(self, log_file, output_file, no_compression):
     self.log_manager = log_manager.LogManager()
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     self.command_executor = CommandExecutor(output_file)
     self.compress = not no_compression
     self.http = HttpManager()
Example #3
 def __init__(self, log_file, output_file, data_dir, uncompressed_archives):
     self.log_manager = log_manager.LogManager()
     self.data_dir = data_dir
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     self.command_executor = CommandExecutor(output_file)
     self.compressed_archives = not uncompressed_archives
Example #4
 def __init__(self, log_file, output_file, no_compression, debug=False):
     self.debug = debug
     self.log_manager = log_manager.LogManager()
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     try:
         with open(output_file, 'a+'):
             pass
     except Exception as error:
         self.logger.error('Output file error: %s',
                           str(error),
                           exc_info=self.debug)
         raise
     self.command_executor = CommandExecutor(output_file)
     self.compress = not no_compression
     self.http = HttpManager()
Example #5
 def __init__(self, log_file, output_file, no_compression):
     self.log_manager = log_manager.LogManager()
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     self.command_executor = CommandExecutor(output_file)
     self.compress = not no_compression
     self.http = HttpManager()
Example #6
 def __init__(self, log_file, output_file, no_compression, debug=False):
     self.debug = debug
     self.log_manager = log_manager.LogManager()
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     try:
         with open(output_file, 'a+'):
             pass
     except Exception as error:
         self.logger.error('Output file error: %s', str(error),
                           exc_info=self.debug)
         raise
     self.command_executor = CommandExecutor(output_file)
     self.compress = not no_compression
     self.http = HttpManager()
Example #7
class RestorationTool:
    def __init__(self, log_file, output_file, data_dir, uncompressed_archives):
        self.log_manager = log_manager.LogManager()
        self.data_dir = data_dir
        self.stop_watch = timer.Timer()
        self.setup_logging(log_file)
        self.command_executor = CommandExecutor(output_file)
        self.compressed_archives = not uncompressed_archives

    def setup_logging(self, log_file):
        self.logger = logging.getLogger(__name__)
        self.log_manager.attach_file_handler(self.logger, log_file)

    def prepare_workdir(self, path):
        self.workdir = path + '/pyxtrabackup-restore'
        filesystem_utils.mkdir_path(self.workdir, 0o755)
        self.logger.debug("Temporary workdir: " + self.workdir)

    def stop_service(self):
        try:
            self.command_executor.exec_manage_service('mysql', 'stop')
        except:
            self.logger.error('Unable to manage MySQL service.', exc_info=True)
            self.clean()
            raise

    def clean_data_dir(self):
        try:
            filesystem_utils.clean_directory(self.data_dir)
        except:
            self.logger.error('Unable to clean MySQL data directory.',
                              exc_info=True)
            self.clean()
            raise

    def restore_base_backup(self, archive_path):
        self.stop_watch.start_timer()
        try:
            self.command_executor.extract_archive(archive_path, self.data_dir,
                                                  self.compressed_archives)
            self.command_executor.exec_backup_preparation(self.data_dir, True)
        except ProcessError:
            self.logger.error(
                'An error occurred during the base backup restoration process.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Base backup restoration time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def restore_incremental_backups(self, incremental_archive):
        try:
            repository, archive_name = filesystem_utils.split_path(
                incremental_archive)
            incremental_target = int(archive_name.split('_')[1])
            for step in range(0, incremental_target + 1):
                self.apply_incremental_backup(repository, step)
        except:
            self.logger.error('An error occurred during the incremental '
                              'backups restoration process.',
                              exc_info=True)
            self.clean()
            raise

    def apply_incremental_backup(self, archive_repository, incremental_step):
        self.stop_watch.start_timer()
        try:
            prefix = ''.join(['inc_', str(incremental_step), '_'])
            backup_archive = filesystem_utils.get_prefixed_file_in_dir(
                archive_repository, prefix)
            extracted_archive_path = ''.join(
                [self.workdir, '/', prefix, 'archive'])
            filesystem_utils.mkdir_path(extracted_archive_path, 0o755)
            self.command_executor.extract_archive(backup_archive,
                                                  extracted_archive_path,
                                                  self.compressed_archives)
            self.command_executor.exec_incremental_preparation(
                self.data_dir, extracted_archive_path)
        except:
            self.logger.error(
                'An error occurred during an incremental backup restoration.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info(
            "Incremental step #%s restoration time: %s - Duration: %s",
            incremental_step, self.stop_watch.stop_timer(),
            self.stop_watch.duration_in_seconds())

    def prepare_data_dir(self):
        try:
            self.command_executor.exec_backup_preparation(self.data_dir, False)
        except:
            self.logger.error(
                'An error occurred during the backup final preparation.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Backup final preparation time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def set_data_dir_permissions(self):
        try:
            self.command_executor.exec_chown('mysql', 'mysql', self.data_dir)
        except:
            self.logger.error('Unable to reset MySQL data dir permissions.',
                              exc_info=True)
            self.clean()
            raise

    def start_service(self):
        try:
            self.command_executor.exec_manage_service('mysql', 'start')
        except:
            self.logger.error('Unable to manage MySQL service.', exc_info=True)
            self.clean()
            raise

    def clean(self):
        filesystem_utils.delete_directory_if_exists(self.workdir)

    def start_restoration(self, base_archive, incremental_archive, workdir,
                          restart_service):
        self.prepare_workdir(workdir)
        self.stop_service()
        self.clean_data_dir()
        self.restore_base_backup(base_archive)
        self.restore_incremental_backups(incremental_archive)
        self.prepare_data_dir()
        self.set_data_dir_permissions()
        self.clean()
        if restart_service:
            self.start_service()
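A minimal usage sketch for the class above. The constructor and start_restoration signatures come from the listing; the module wiring (log_manager, timer, filesystem_utils, CommandExecutor) is assumed to be imported as in the original file, and every path and archive name below is an illustrative placeholder rather than a value taken from the project:

# Hedged sketch: paths, archive names and the restart flag are placeholders.
tool = RestorationTool(log_file='/var/log/pyxtrabackup-restore.log',
                       output_file='/var/log/pyxtrabackup-restore.out',
                       data_dir='/var/lib/mysql',
                       uncompressed_archives=False)
# The incremental archive name is expected to carry the step number as its
# second underscore-separated field (inc_<step>_...), which drives the loop
# in restore_incremental_backups().
tool.start_restoration(base_archive='/backups/base_archive.tar.gz',
                       incremental_archive='/backups/INC/inc_3_archive.tar.gz',
                       workdir='/var/tmp',
                       restart_service=True)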
Example #8
class BackupTool:

    def __init__(self, log_file, output_file):
        self.log_manager = log_manager.LogManager()
        self.stop_watch = timer.Timer()
        self.setup_logging(log_file)
        self.command_executor = CommandExecutor(output_file)

    def setup_logging(self, log_file):
        self.logger = logging.getLogger(__name__)
        self.log_manager.attach_file_handler(self.logger, log_file)

    def check_prerequisites(self, repository):
        try:
            filesystem_utils.check_required_binaries(['innobackupex', 'tar'])
            filesystem_utils.check_path_existence(repository)
        except exception.ProgramError:
            self.logger.error('Prerequisites check failed.', exc_info=True)
            raise

    def prepare_workdir(self, path):
        filesystem_utils.mkdir_path(path, 0o755)
        self.workdir = path + '/xtrabackup_tmp'
        self.logger.debug("Temporary workdir: " + self.workdir)
        self.archive_path = path + '/backup.tar.gz'
        self.logger.debug("Temporary archive: " + self.archive_path)

    def prepare_repository(self, repository, incremental):
        if incremental:
            sub_directory = '/INC'
        else:
            sub_directory = ''
        self.backup_repository = filesystem_utils.create_sub_repository(
            repository, sub_directory)

    def prepare_archive_name(self, incremental, incremental_cycle):
        if incremental:
            backup_prefix = ''.join(['inc_', str(self.incremental_step), '_'])
        else:
            if incremental_cycle:
                backup_prefix = 'base_'
            else:
                backup_prefix = ''
        self.final_archive_path = filesystem_utils.prepare_archive_path(
            self.backup_repository, backup_prefix)

    def exec_incremental_backup(self, user, password, thread_count):
        self.stop_watch.start_timer()
        try:
            self.command_executor.exec_incremental_backup(
                user,
                password,
                thread_count,
                self.last_lsn,
                self.workdir)
        except ProcessError:
            self.logger.error(
                'An error occurred during the incremental backup process.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Incremental backup time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def exec_full_backup(self, user, password, thread_count):
        self.stop_watch.start_timer()
        try:
            self.command_executor.exec_filesystem_backup(
                user,
                password,
                thread_count,
                self.workdir)
        except ProcessError:
            self.logger.error(
                'An error occurred during the backup process.', exc_info=True)
            self.clean()
            raise
        self.logger.info("Backup time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def prepare_backup(self, redo_logs):
        self.stop_watch.start_timer()
        try:
            self.command_executor.exec_backup_preparation(self.workdir,
                                                          redo_logs)
        except ProcessError:
            self.logger.error(
                'An error occurred during the preparation process.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Backup preparation time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def compress_backup(self):
        self.stop_watch.start_timer()
        try:
            self.command_executor.create_archive(
                self.workdir, self.archive_path)
        except ProcessError:
            self.logger.error(
                'An error occurred during the backup compression.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Backup compression time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def transfer_backup(self, repository):
        self.stop_watch.start_timer()
        try:
            self.logger.debug("Archive path: " + self.final_archive_path)
            filesystem_utils.move_file(self.archive_path,
                                       self.final_archive_path)
        except Exception:
            self.logger.error(
                'An error occurred during the backup transfer.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Archive copy time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def clean(self):
        filesystem_utils.delete_directory_if_exists(self.workdir)

    def save_incremental_data(self, incremental):
        try:
            if incremental:
                self.incremental_step += 1
            else:
                self.incremental_step = 0
            self.last_lsn = filesystem_utils.retrieve_value_from_file(
                self.workdir + '/xtrabackup_checkpoints',
                r'^to_lsn = (\d+)$')
            filesystem_utils.write_array_to_file(
                '/var/tmp/pyxtrabackup-incremental',
                ['BASEDIR=' + self.backup_repository,
                 'LSN=' + self.last_lsn,
                 'INCREMENTAL_STEP=' + str(self.incremental_step)])
        except:
            self.logger.error(
                'Unable to save the incremental backup data.',
                exc_info=True)
            self.clean()
            raise

    def load_incremental_data(self):
        try:
            self.base_dir = filesystem_utils.retrieve_value_from_file(
                '/var/tmp/pyxtrabackup-incremental',
                '^BASEDIR=(.*)$')
            self.last_lsn = filesystem_utils.retrieve_value_from_file(
                '/var/tmp/pyxtrabackup-incremental',
                r'^LSN=(\d+)$')
            self.incremental_step = int(
                filesystem_utils.retrieve_value_from_file(
                    '/var/tmp/pyxtrabackup-incremental',
                    r'^INCREMENTAL_STEP=(\d+)$'))
        except:
            self.logger.error(
                'Unable to load the incremental backup data.',
                exc_info=True)
            self.clean()
            raise

    def start_full_backup(self, repository, workdir, user, password, threads):
        self.check_prerequisites(repository)
        self.prepare_workdir(workdir)
        self.prepare_repository(repository, False)
        self.prepare_archive_name(False, False)
        self.exec_full_backup(user, password, threads)
        self.prepare_backup(False)
        self.compress_backup()
        self.transfer_backup(repository)
        self.clean()

    def start_incremental_backup(self, repository, incremental,
                                 workdir, user, password, threads):
        self.check_prerequisites(repository)
        self.prepare_workdir(workdir)
        self.prepare_repository(repository, True)
        if incremental:
            self.load_incremental_data()
            self.prepare_archive_name(incremental, True)
            self.exec_incremental_backup(user, password, threads)
        else:
            self.prepare_archive_name(incremental, True)
            self.exec_full_backup(user, password, threads)
        self.save_incremental_data(incremental)
        self.compress_backup()
        self.transfer_backup(repository)
        self.clean()
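A similarly hedged sketch for driving BackupTool. The method signatures are those shown above; the repository, work directory, and MySQL credentials are placeholders, and the incremental flag distinguishes starting a new incremental cycle (False, full base backup) from adding an increment on top of it (True):

# Hedged sketch: repository, workdir and credentials are placeholders only.
tool = BackupTool(log_file='/var/log/pyxtrabackup.log',
                  output_file='/var/log/pyxtrabackup.out')
# Standalone full backup, archived directly into the repository.
tool.start_full_backup(repository='/backups', workdir='/var/tmp',
                       user='backup', password='secret', threads=2)
# Incremental cycle: the first call with incremental=False creates the base
# (step 0) under <repository>/INC; later calls with incremental=True reuse
# the LSN and step saved in /var/tmp/pyxtrabackup-incremental.
tool.start_incremental_backup(repository='/backups', incremental=False,
                              workdir='/var/tmp', user='backup',
                              password='secret', threads=2)
tool.start_incremental_backup(repository='/backups', incremental=True,
                              workdir='/var/tmp', user='backup',
                              password='secret', threads=2)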
Example #9
 def __init__(self, log_file, output_file):
     self.log_manager = log_manager.LogManager()
     self.stop_watch = timer.Timer()
     self.setup_logging(log_file)
     self.command_executor = CommandExecutor(output_file)
class RestorationTool:

    def __init__(self, log_file, output_file, data_dir, uncompressed_archives):
        self.log_manager = log_manager.LogManager()
        self.data_dir = data_dir
        self.stop_watch = timer.Timer()
        self.setup_logging(log_file)
        self.command_executor = CommandExecutor(output_file)
        self.compressed_archives = not uncompressed_archives

    def setup_logging(self, log_file):
        self.logger = logging.getLogger(__name__)
        self.log_manager.attach_file_handler(self.logger, log_file)

    def prepare_workdir(self, path):
        self.workdir = path + '/pyxtrabackup-restore'
        filesystem_utils.mkdir_path(self.workdir, 0o755)
        self.logger.debug("Temporary workdir: " + self.workdir)

    def stop_service(self):
        try:
            self.command_executor.exec_manage_service('mysql', 'stop')
        except:
            self.logger.error(
                'Unable to manage MySQL service.',
                exc_info=True)
            self.clean()
            raise

    def clean_data_dir(self):
        try:
            filesystem_utils.clean_directory(self.data_dir)
        except:
            self.logger.error(
                'Unable to clean MySQL data directory.',
                exc_info=True)
            self.clean()
            raise

    def restore_base_backup(self, archive_path):
        self.stop_watch.start_timer()
        try:
            self.command_executor.extract_archive(archive_path,
                                                  self.data_dir,
                                                  self.compressed_archives)
            self.command_executor.exec_backup_preparation(self.data_dir, True)
        except ProcessError:
            self.logger.error(
                'An error occurred during the base backup restoration process.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Base backup restoration time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def restore_incremental_backups(self, incremental_archive):
        try:
            repository, archive_name = filesystem_utils.split_path(
                incremental_archive)
            incremental_target = int(archive_name.split('_')[1])
            for step in range(0, incremental_target + 1):
                self.apply_incremental_backup(repository, step)
        except:
            self.logger.error(
                'An error occurred during the incremental '
                'backups restoration process.',
                exc_info=True)
            self.clean()
            raise

    def apply_incremental_backup(self, archive_repository, incremental_step):
        self.stop_watch.start_timer()
        try:
            prefix = ''.join(['inc_', str(incremental_step), '_'])
            backup_archive = filesystem_utils.get_prefixed_file_in_dir(
                archive_repository, prefix)
            extracted_archive_path = ''.join([self.workdir, '/',
                                              prefix, 'archive'])
            filesystem_utils.mkdir_path(extracted_archive_path, 0o755)
            self.command_executor.extract_archive(backup_archive,
                                                  extracted_archive_path,
                                                  self.compressed_archives)
            self.command_executor.exec_incremental_preparation(
                self.data_dir,
                extracted_archive_path)
        except:
            self.logger.error(
                'An error occurred during an incremental backup restoration.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Incremental step #%s restoration time: %s\
 - Duration: %s",
                         incremental_step,
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def prepare_data_dir(self):
        try:
            self.command_executor.exec_backup_preparation(self.data_dir, False)
        except:
            self.logger.error(
                'An error occurred during the backup final preparation.',
                exc_info=True)
            self.clean()
            raise
        self.logger.info("Backup final preparation time: %s - Duration: %s",
                         self.stop_watch.stop_timer(),
                         self.stop_watch.duration_in_seconds())

    def set_data_dir_permissions(self):
        try:
            self.command_executor.exec_chown('mysql', 'mysql', self.data_dir)
        except:
            self.logger.error('Unable to reset MySQL data dir permissions.',
                              exc_info=True)
            self.clean()
            raise

    def start_service(self):
        try:
            self.command_executor.exec_manage_service('mysql', 'start')
        except:
            self.logger.error(
                'Unable to manage MySQL service.',
                exc_info=True)
            self.clean()
            raise

    def clean(self):
        filesystem_utils.delete_directory_if_exists(self.workdir)

    def start_restoration(self, base_archive, incremental_archive,
                          workdir, restart_service):
        self.prepare_workdir(workdir)
        self.stop_service()
        self.clean_data_dir()
        self.restore_base_backup(base_archive)
        self.restore_incremental_backups(incremental_archive)
        self.prepare_data_dir()
        self.set_data_dir_permissions()
        self.clean()
        if restart_service:
            self.start_service()