def __init__(self, config="/etc/bck.conf", dry_run=0):
    """
    Initialize the backup wrapper.

    :param config: path to the tool configuration file.
    :param dry_run: when non-zero, commands are only logged, not executed
        (checked as ``self.dry == 0`` before each subprocess call).
    """
    self.dry = dry_run
    self.conf = config
    # The base class parses the config file and exposes its options
    # as instance attributes.
    GeneralClass.__init__(self, self.conf)
    self.check_env_obj = CheckEnv(self.conf)
    # A 'prepare_tool' option in the config overrides the binary used
    # as the backup tool.
    if hasattr(self, 'prepare_tool'):
        self.backup_tool = self.prepare_tool
def all_backup(self):
    """
    Decide which backup to take and run it.

    If there is no full backup yet, take the very first full backup.
    If the most recent full backup is older than ``full_backup_interval``,
    take a fresh full backup (archiving/cleaning as configured).
    Otherwise take an incremental backup based on the recent full backup.
    """

    def copy_to_remote_if_configured():
        # Ship backups to a remote host only when both settings exist
        # and are non-empty.
        if hasattr(self, 'remote_conn') and hasattr(self, 'remote_dir') \
                and self.remote_conn and self.remote_dir:
            self.copy_backup_to_remote_host()

    # Workaround for circular import dependency error in Python:
    # CheckEnv is instantiated locally rather than at module scope.
    env_checker = CheckEnv(self.conf,
                           full_dir=self.full_dir,
                           inc_dir=self.inc_dir)
    if not env_checker.check_all_env():
        return

    if self.recent_full_backup_file() == 0:
        logger.debug("- - - - You have no backups : Taking very first Full Backup! - - - -")
        # Flush logs, then take the first full backup.
        if not self.mysql_connection_flush_logs():
            return
        if not self.full_backup():
            return
        # Removing old inc backups
        self.clean_inc_backup_dir()
        copy_to_remote_if_configured()
        return True

    if self.last_full_backup_date() == 1:
        logger.debug("- - - - Your full backup is timeout : Taking new Full Backup! - - - -")
        # Archive the outgoing backups first, if archiving is configured.
        if hasattr(self, 'archive_dir'):
            if (hasattr(self, 'max_archive_duration') and self.max_archive_duration) \
                    or (hasattr(self, 'max_archive_size') and self.max_archive_size):
                self.clean_old_archives()
            self.create_backup_archives()
        else:
            logger.debug("Archiving disabled. Skipping!")
        if not self.mysql_connection_flush_logs():
            return
        if not self.full_backup():
            return
        # Removing full backups
        self.clean_full_backup_dir()
        # Removing inc backups
        self.clean_inc_backup_dir()
        copy_to_remote_if_configured()
        return True

    # Recent full backup is still fresh: take an incremental instead.
    logger.debug("- - - - You have a full backup that is less than {} seconds old. - - - -".format(
        self.full_backup_interval))
    logger.debug("- - - - We will take an incremental one based on recent Full Backup - - - -")
    time.sleep(3)
    self.inc_backup()
    copy_to_remote_if_configured()
    return True
def wipe_backup_prepare_copyback(self, basedir, keyring_vault=0):
    """
    Method Backup + Prepare and Copy-back actions.
    It is also going to create slave server from backup of master and start.
    :param basedir: The basedir path of MySQL
    :param keyring_vault: 1 means keyring_vault plugin options come fully
        from the config (5.7 only); 0 formats the default option template
        with basedir. TODO confirm semantics against the config files.
    :return: Success if no exception raised from methods
    """
    # NOTE(review): indentation was reconstructed from a whitespace-mangled
    # source; the nesting of the steps following run_change_master() is
    # presumed — confirm against version control.
    c_count = 0
    # Each generated MySQL option combination is exercised as one "cycle".
    for options in ConfigGenerator(
            config=self.conf).options_combination_generator(
            self.mysql_options):
        c_count = c_count + 1
        options = " ".join(options)
        if '5.7' in basedir:
            if keyring_vault == 0:
                options = options + " " + self.df_mysql_options.format(
                    basedir, c_count)
            elif keyring_vault == 1:
                # The keyring_vault options must be provided manually with full path in config.
                # Such as: --early-plugin-load=keyring_vault=keyring_vault.so,--loose-keyring_vault_config=/sda/vault_server/keyring_vault.cnf
                # It indicates that there is no need to pass basedir that's why only passing the [--server-id=c_count]
                options = options + " " + self.df_mysql_options.format(
                    c_count)
        else:
            options = options + " " + self.df_mysql_options.format(c_count)
        logger.debug("*********************************")
        logger.debug("Starting cycle{}".format(c_count))
        logger.debug("Will start MySQL with {}".format(options))
        # Passing options to start MySQL
        if self.clone_obj.wipe_server_all(basedir_path=basedir,
                                          options=options):
            # Specifying directories and passing to WrapperForBackupTest class
            full_dir = self.backupdir + "/cycle{}".format(c_count) + "/full"
            inc_dir = self.backupdir + "/cycle{}".format(c_count) + "/inc"
            backup_obj = WrapperForBackupTest(config=self.conf,
                                              full_dir=full_dir,
                                              inc_dir=inc_dir,
                                              basedir=basedir)
            # Take backups
            logger.debug("Started to run run_all_backup()")
            if backup_obj.run_all_backup():
                prepare_obj = WrapperForPrepareTest(config=self.conf,
                                                    full_dir=full_dir,
                                                    inc_dir=inc_dir)
                # Prepare backups
                logger.debug("Started to run run_prepare_backup()")
                if prepare_obj.run_prepare_backup():
                    if hasattr(self, 'make_slaves'):
                        logger.debug(
                            "make_slaves is defined so will create slaves!")
                        # Creating slave datadir
                        slave_datadir = self.create_slave_datadir(
                            basedir=basedir, num=1)
                        # Doing some stuff for creating slave server env
                        prepare_obj.run_xtra_copyback(datadir=slave_datadir)
                        prepare_obj.giving_chown(datadir=slave_datadir)
                        slave_full_options = self.prepare_start_slave_options(
                            basedir=basedir, slave_number=1, options=options)
                        prepare_obj.start_mysql_func(
                            start_tool="{}/start_dynamic".format(basedir),
                            options=slave_full_options)
                        # Creating connection file for new node
                        self.create_slave_connection_file(basedir=basedir,
                                                          num=1)
                        # Creating shutdown file for new node
                        self.create_slave_shutdown_file(basedir=basedir,
                                                        num=1)
                        # Checking if node is up
                        logger.debug("Pausing a bit here...")
                        sleep(10)
                        chk_obj = CheckEnv(config=self.conf)
                        check_options = "--user={} --socket={}/sock{}.sock".format(
                            'root', basedir, 1)
                        chk_obj.check_mysql_uptime(options=check_options)
                        # Make this node to be slave
                        mysql_master_client_cmd = RunBenchmark(
                            config=self.conf).get_mysql_conn(
                            basedir=basedir)
                        # Create replication user on master server
                        self.run_sql_create_user(mysql_master_client_cmd)
                        # Drop blank users if PS version is 5.6 from master server
                        if '5.6' in basedir or '5.5' in basedir:
                            self.drop_blank_mysql_users(
                                mysql_master_client_cmd)
                        full_backup_dir = prepare_obj.recent_full_backup_file()
                        mysql_slave_client_cmd = RunBenchmark(
                            config=self.conf).get_mysql_conn(
                            basedir=basedir, file_name="cl_node{}".format(1))
                        # Creating dsns table
                        self.create_dsns_table(mysql_master_client_cmd)
                        # Running change master and some other commands here
                        if self.run_change_master(
                                basedir=basedir,
                                full_backup_dir="{}/{}".format(
                                    full_dir, full_backup_dir),
                                mysql_master_client_cmd=mysql_master_client_cmd,
                                mysql_slave_client_cmd=mysql_slave_client_cmd):
                            sleep(10)
                            logger.debug(
                                "Starting actions for second slave here...")
                            # Actions for second slave, it is going to be started from slave backup
                            full_dir_2 = self.backupdir + "/cycle{}".format(
                                c_count) + "/slave_backup" + "/full"
                            inc_dir_2 = self.backupdir + "/cycle{}".format(
                                c_count) + "/slave_backup" + "/inc"
                            # Create config for this slave node1 here
                            logger.debug(
                                "Generating special config file for second slave")
                            cnf_obj = ConfigGenerator(config=self.conf)
                            slave_conf_path = self.backupdir + "/cycle{}".format(
                                c_count)
                            # Pick the slave config matching the server/xtrabackup version
                            # combination encoded in the config file name.
                            if ('5.7' in basedir) and ('2_4_ps_5_7' in self.conf):
                                slave_conf_file = 'xb_2_4_ps_5_7_slave.conf'
                            elif ('5.6' in basedir) and ('2_4_ps_5_6' in self.conf):
                                slave_conf_file = 'xb_2_4_ps_5_6_slave.conf'
                            elif ('5.6' in basedir) and ('2_3_ps_5_6' in self.conf):
                                slave_conf_file = 'xb_2_3_ps_5_6_slave.conf'
                            elif ('5.5' in basedir) and ('2_3_ps_5_5' in self.conf):
                                slave_conf_file = 'xb_2_3_ps_5_5_slave.conf'
                            elif ('5.5' in basedir) and ('2_4_ps_5_5' in self.conf):
                                slave_conf_file = 'xb_2_4_ps_5_5_slave.conf'
                            cnf_obj.generate_config_files(
                                test_path=self.testpath,
                                conf_file=slave_conf_file,
                                basedir=basedir,
                                datadir="{}/node{}".format(basedir, 1),
                                sock_file="{}/sock{}.sock".format(basedir, 1),
                                backup_path=slave_conf_path)
                            # DO backup here
                            backup_obj_2 = WrapperForBackupTest(
                                config="{}/{}".format(slave_conf_path,
                                                      slave_conf_file),
                                full_dir=full_dir_2,
                                inc_dir=inc_dir_2,
                                basedir=basedir)
                            # NOTE(review): first slave uses run_all_backup(),
                            # this one calls all_backup() directly — confirm
                            # the asymmetry is intentional.
                            if backup_obj_2.all_backup():
                                # DO prepare here
                                prepare_obj_2 = WrapperForPrepareTest(
                                    config="{}/{}".format(slave_conf_path,
                                                          slave_conf_file),
                                    full_dir=full_dir_2,
                                    inc_dir=inc_dir_2)
                                if prepare_obj_2.run_prepare_backup():
                                    # Removing outside tablespace files
                                    if os.path.isfile(
                                            '{}/out_ts1.ibd'.format(basedir)):
                                        os.remove(
                                            '{}/out_ts1.ibd'.format(basedir))
                                    if os.path.isfile(
                                            '{}/sysbench_test_db/t1.ibd'.format(basedir)):
                                        os.remove(
                                            '{}/sysbench_test_db/t1.ibd'.format(basedir))
                                    # Creating slave datadir
                                    slave_datadir_2 = self.create_slave_datadir(
                                        basedir=basedir, num=2)
                                    prepare_obj_2.run_xtra_copyback(
                                        datadir=slave_datadir_2)
                                    prepare_obj_2.giving_chown(
                                        datadir=slave_datadir_2)
                                    slave_full_options = self.prepare_start_slave_options(
                                        basedir=basedir,
                                        slave_number=2,
                                        options=options)
                                    prepare_obj_2.start_mysql_func(
                                        start_tool="{}/start_dynamic".format(
                                            basedir),
                                        options=slave_full_options)
                                    # Creating connection file for new node
                                    self.create_slave_connection_file(
                                        basedir=basedir, num=2)
                                    # Creating shutdown file for new node
                                    self.create_slave_shutdown_file(
                                        basedir=basedir, num=2)
                                    logger.debug("Pausing a bit here...")
                                    sleep(10)
                                    check_options_2 = "--user={} --socket={}/sock{}.sock".format(
                                        'root', basedir, 2)
                                    chk_obj.check_mysql_uptime(
                                        options=check_options_2)
                                    mysql_slave_client_cmd_2 = RunBenchmark(
                                        config=self.conf).get_mysql_conn(
                                        basedir=basedir,
                                        file_name="cl_node{}".format(2))
                                    full_backup_dir_2 = prepare_obj_2.recent_full_backup_file()
                                    if self.run_change_master(
                                            basedir=basedir,
                                            full_backup_dir="{}/{}".format(
                                                full_dir_2, full_backup_dir_2),
                                            mysql_master_client_cmd=mysql_master_client_cmd,
                                            mysql_slave_client_cmd=mysql_slave_client_cmd_2,
                                            is_slave=True):
                                        sleep(10)
                                        # Running on master
                                        self.run_pt_table_checksum(
                                            basedir=basedir)
                                        # Shutdown slaves
                                        self.slave_shutdown(basedir=basedir,
                                                            num=1)
                                        self.slave_shutdown(basedir=basedir,
                                                            num=2)
                                        sleep(5)
                    else:
                        # make_slaves not configured: plain copy-back.
                        prepare_obj.copy_back_action(options=options)
def inc_backup(self):
    """
    Take an incremental backup with the configured backup tool.

    The backup is based on the most recent incremental backup if one
    exists, otherwise on the most recent full backup.  The new backup is
    written to a fresh time-stamped directory under ``inc_dir``.

    :return: True on success, False on failure; None in dry-run mode
        (``self.dry != 0``), matching the original behavior.
    """
    recent_bck = self.recent_full_backup_file()
    recent_inc = self.recent_inc_backup_file()
    # NOTE(review): instance is unused here but kept — elsewhere in this
    # file creating CheckEnv is described as a circular-import workaround.
    check_env_obj = CheckEnv(self.conf)
    # Creating time-stamped incremental backup directory
    inc_backup_dir = self.create_backup_directory(self.inc_dir)

    # Choose the base backup: latest incremental if present, else latest full.
    if recent_inc == 0:
        base_dir, base_name = self.full_dir, recent_bck
        stream_file = "full_backup.stream"
    else:
        base_dir, base_name = self.inc_dir, recent_inc
        stream_file = "inc_backup.stream"

    # BUG FIX: the original format string had a literal '******' where the
    # password placeholder belongs, so '%' received 7 arguments for 6
    # placeholders and raised TypeError before any backup could run.
    args = "%s --defaults-file=%s --user=%s --password='%s' " \
           "--target-dir=%s --incremental-basedir=%s/%s --backup" % \
           (self.backup_tool, self.mycnf, self.mysql_user,
            self.mysql_password, inc_backup_dir, base_dir, base_name)

    conn_opts = self._connection_options()
    if conn_opts is None:
        return False
    args += conn_opts
    args += self._compression_options()
    args += self._encryption_options()

    # Make the base backup usable before taking the incremental on top of it.
    if not self._prepare_base_for_inc(base_dir, base_name, stream_file, args):
        return False

    args += self._extra_backup_options(inc_backup_dir)

    logger.debug("The following backup command will be executed %s", args)
    if self.dry == 0:
        status, output = subprocess.getstatusoutput(args)
        if status == 0:
            logger.debug(output)
            return True
        logger.error("INCREMENT BACKUP FAILED!")
        time.sleep(5)
        logger.error(output)
        return False


def _connection_options(self):
    """
    Build the MySQL connection arguments from the config.

    :return: argument string, or None when neither a socket nor a
        host/port pair is configured (an error is logged).
    """
    if hasattr(self, 'mysql_socket'):
        return " --socket=%s" % self.mysql_socket
    if hasattr(self, 'mysql_host') and hasattr(self, 'mysql_port'):
        return " --host=%s --port=%s" % (self.mysql_host, self.mysql_port)
    # BUG FIX: one original branch said "are not defined" (double negative);
    # unified to the correct wording.
    logger.critical(
        "Neither mysql_socket nor mysql_host and mysql_port are defined in config!")
    return None


def _compression_options(self):
    """
    Build the --compress* arguments from optional config attributes.

    BUG FIX: the original second code path tested
    hasattr(self, 'compress-threads') — never true for a Python attribute —
    and emitted underscore spellings (--compress_chunk_size,
    --compress_threads) that xtrabackup does not accept.  The hyphenated
    spellings from the first code path are used consistently here.
    """
    opts = ""
    if hasattr(self, 'compress'):
        opts += " --compress=%s" % self.compress
    if hasattr(self, 'compress_chunk_size'):
        opts += " --compress-chunk-size=%s" % self.compress_chunk_size
    if hasattr(self, 'compress_threads'):
        opts += " --compress-threads=%s" % self.compress_threads
    return opts


def _encryption_options(self):
    """
    Build the --encrypt* arguments from optional config attributes.

    BUG FIX: the original first code path emitted --encrypt_key_file;
    the hyphenated --encrypt-key-file (as in the second path) is the
    spelling xtrabackup accepts.
    """
    opts = ""
    if hasattr(self, 'encrypt'):
        opts += " --encrypt=%s" % self.encrypt
    if hasattr(self, 'encrypt_key'):
        opts += " --encrypt-key=%s" % self.encrypt_key
    if hasattr(self, 'encrypt_key_file'):
        opts += " --encrypt-key-file=%s" % self.encrypt_key_file
    if hasattr(self, 'encrypt_threads'):
        opts += " --encrypt-threads=%s" % self.encrypt_threads
    if hasattr(self, 'encrypt_chunk_size'):
        opts += " --encrypt-chunk-size=%s" % self.encrypt_chunk_size
    return opts


def _prepare_base_for_inc(self, base_dir, base_name, stream_file, args):
    """
    Make the base backup usable as --incremental-basedir.

    Extracts (and optionally decrypts) a streamed base backup, or — for
    non-streamed encrypted backups — decrypts only the checkpoints file
    (workaround for LP #1444255).

    :param base_dir: directory holding the base backup (full or inc dir).
    :param base_name: time-stamped name of the base backup.
    :param stream_file: stream file name inside the base backup dir.
    :param args: backup command built so far (checked for 'encrypt').
    :return: False if an extraction/decryption step failed, else True.
    """
    base_path = "%s/%s" % (base_dir, base_name)
    if hasattr(self, 'stream') and hasattr(self, 'encrypt') \
            and hasattr(self, 'xbs_decrypt'):
        logger.debug(
            "Using xbstream to extract and decrypt from %s!" % stream_file)
        xbstream_command = "%s %s --decrypt=%s --encrypt-key=%s --encrypt-threads=%s " \
                           "< %s/%s -C %s" % (
                               self.xbstream, self.xbstream_options,
                               self.decrypt, self.encrypt_key,
                               self.encrypt_threads,
                               base_path, stream_file, base_path)
        return self._run_xbstream(xbstream_command, base_path, stream_file)
    if hasattr(self, 'stream'):
        logger.debug("Using xbstream to extract from %s!" % stream_file)
        xbstream_command = "%s %s < %s/%s -C %s" % (
            self.xbstream, self.xbstream_options,
            base_path, stream_file, base_path)
        return self._run_xbstream(xbstream_command, base_path, stream_file)
    if 'encrypt' in args:
        logger.debug("Applying workaround for LP #1444255")
        xbcrypt_command = "%s -d -k %s -a %s -i %s/xtrabackup_checkpoints.xbcrypt " \
                          "-o %s/xtrabackup_checkpoints" % \
                          (self.xbcrypt, self.encrypt_key, self.encrypt,
                           base_path, base_path)
        logger.debug(
            "The following xbcrypt command will be executed %s",
            xbcrypt_command)
        if self.dry == 0:
            status, output = subprocess.getstatusoutput(xbcrypt_command)
            if status == 0:
                logger.debug(output[-27:])
            else:
                logger.error("XBCRYPT COMMAND FAILED!")
                time.sleep(5)
                logger.error(output)
                return False
    return True


def _run_xbstream(self, xbstream_command, base_path, stream_file):
    """
    Run an xbstream extraction command; return False only on failure.

    The command is skipped (treated as success) in dry-run mode or when
    the stream file does not exist, matching the original behavior.

    BUG FIX: the original second code path checked for the stream file
    under the *full* backup directory while extracting under the
    *incremental* one; the existence check now uses the same path the
    command operates on.
    """
    logger.debug(
        "The following xbstream command will be executed %s",
        xbstream_command)
    if self.dry == 0 and isfile("%s/%s" % (base_path, stream_file)):
        status, output = subprocess.getstatusoutput(xbstream_command)
        if status == 0:
            logger.debug("XBSTREAM command succeeded.")
        else:
            logger.error("XBSTREAM COMMAND FAILED!")
            time.sleep(5)
            logger.error(output)
            return False
    return True


def _extra_backup_options(self, inc_backup_dir):
    """
    Build pass-through, partial-backup and streaming arguments.

    :param inc_backup_dir: target directory for a streamed incremental.
    :return: argument string (possibly empty).
    """
    opts = ""
    if hasattr(self, 'xtra_options'):
        opts += " " + self.xtra_options
    if hasattr(self, 'xtra_backup'):
        opts += " " + self.xtra_backup
    if hasattr(self, 'partial_list'):
        opts += " " + '--databases="%s"' % self.partial_list
        logger.warning("Partial Backup is enabled!")
    if hasattr(self, 'stream'):
        opts += " " + '--stream="%s"' % self.stream
        opts += " > %s/inc_backup.stream" % inc_backup_dir
        logger.warning("Streaming is enabled!")
    return opts
def __init__(self, config="/etc/bck.conf"):
    """
    Initialize from the given configuration file.

    :param config: path to the tool configuration file.
    """
    self.conf = config
    # Base class parses the config and exposes options as attributes.
    GeneralClass.__init__(self, self.conf)
    env_checker = CheckEnv(self.conf)
    self.check_env_obj = env_checker
    # Cache the detected init system; the numeric result selects the
    # start/stop commands used elsewhere.
    self.result = env_checker.check_systemd_init()
class Prepare(GeneralClass):
    """Prepare taken full/incremental backups and copy them back to the datadir."""

    def __init__(self, config="/etc/bck.conf"):
        # :param config: path to the tool configuration file.
        self.conf = config
        GeneralClass.__init__(self, self.conf)
        self.check_env_obj = CheckEnv(self.conf)
        # Numeric init-system code; consumed by shutdown/start methods below.
        self.result = self.check_env_obj.check_systemd_init()

    def recent_full_backup_file(self):
        # Return last full backup dir name
        # (lexicographic max of time-stamped names), or 0 when none exist.
        if len(os.listdir(self.full_dir)) > 0:
            return max(os.listdir(self.full_dir))
        else:
            return 0

    def check_inc_backups(self):
        # Check for Incremental backups: 1 if any exist, else 0.
        if len(os.listdir(self.inc_dir)) > 0:
            return 1
        else:
            return 0

    #############################################################################################################
    # PREPARE ONLY FULL BACKUP
    #############################################################################################################

    def prepare_only_full_backup(self):
        """Prepare the most recent full backup (decrypting/decompressing first if configured).

        When incremental backups exist, the full backup is prepared once
        with ``xtrabck_prepare`` options so incrementals can be applied
        on top of it later.
        """
        if self.recent_full_backup_file() == 0:
            logger.debug(
                "####################################################################################################"
            )
            logger.debug(
                "You have no FULL backups. First please take FULL backup for preparing - - - - - - - - - - - - - - #"
            )
            logger.debug(
                "####################################################################################################"
            )
            # NOTE(review): exits the whole process rather than returning
            # False — confirm callers expect this.
            exit(0)
        elif self.check_inc_backups() == 0:
            logger.debug(
                "################################################################################################"
            )
            logger.debug(
                "Preparing Full Backup - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #"
            )
            logger.debug(
                "################################################################################################"
            )
            # Check if decryption enabled
            if hasattr(self, 'decrypt'):
                decr = "%s --decrypt=%s --encrypt-key=%s --target-dir=%s/%s" % \
                       (self.backup_tool, self.decrypt, self.encrypt_key,
                        self.full_dir, self.recent_full_backup_file())
                logger.debug("Trying to decrypt backup")
                logger.debug("Running decrypt command -> %s", decr)
                status, output = subprocess.getstatusoutput(decr)
                if status == 0:
                    logger.debug(output[-27:])
                    logger.debug("Decrypted!")
                else:
                    logger.error("FULL BACKUP DECRYPT FAILED!")
                    time.sleep(5)
                    logger.error(output)
            # Check if decompression enabled
            if hasattr(self, 'decompress'):
                decmp = "%s --decompress=%s --target-dir=%s/%s" % \
                        (self.backup_tool, self.decompress,
                         self.full_dir, self.recent_full_backup_file())
                logger.debug("Trying to decompress backup")
                logger.debug("Running decompress command -> %s", decmp)
                status, output = subprocess.getstatusoutput(decmp)
                if status == 0:
                    logger.debug(output[-27:])
                    logger.debug("Decompressed")
                else:
                    logger.error("FULL BACKUP DECOMPRESSION FAILED!")
                    time.sleep(5)
                    logger.error(output)
            args = "%s --prepare --target-dir=%s/%s" % \
                   (self.backup_tool, self.full_dir,
                    self.recent_full_backup_file())
            logger.debug("Running prepare command -> %s", args)
            status, output = subprocess.getstatusoutput(args)
            if status == 0:
                logger.debug(output[-27:])
                # NOTE(review): unlike the branch below, this success path
                # returns None (not True) — confirm callers rely only on
                # the other branch's return value.
            else:
                logger.error("FULL BACKUP PREPARE FAILED!")
                time.sleep(5)
                logger.error(output)
                return False
        else:
            logger.debug(
                "Preparing Full backup 1 time. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -#\n"
                "Final prepare,will occur after preparing all inc backups - - - - - - - - - - - - - - - - - - - - -#"
            )
            logger.debug(
                "####################################################################################################"
            )
            time.sleep(3)
            # Check if decryption enabled
            if hasattr(self, 'decrypt'):
                decr = "%s --decrypt=%s --encrypt-key=%s --target-dir=%s/%s" % \
                       (self.backup_tool, self.decrypt, self.encrypt_key,
                        self.full_dir, self.recent_full_backup_file())
                logger.debug("Trying to decrypt backup")
                logger.debug("Running decrypt command -> %s", decr)
                status, output = subprocess.getstatusoutput(decr)
                if status == 0:
                    logger.debug(output[-27:])
                    logger.debug("Decrypted!")
                else:
                    logger.error("FULL BACKUP DECRYPT FAILED!")
                    time.sleep(5)
                    logger.error(output)
            # Check if decompression enabled, if it is, decompress backup prior prepare
            if hasattr(self, 'decompress'):
                decmp = "%s --decompress=%s --target-dir=%s/%s" % \
                        (self.backup_tool, self.decompress,
                         self.full_dir, self.recent_full_backup_file())
                logger.debug("Trying to decompress backup")
                logger.debug("Running decompress command -> %s", decmp)
                status, output = subprocess.getstatusoutput(decmp)
                if status == 0:
                    logger.debug(output[-27:])
                    logger.debug("Decompressed")
                else:
                    logger.error("FULL BACKUP DECOMPRESSION FAILED!")
                    time.sleep(5)
                    logger.error(output)
            args = '%s --prepare %s --target-dir=%s/%s' % \
                   (self.backup_tool, self.xtrabck_prepare,
                    self.full_dir, self.recent_full_backup_file())
            logger.debug("Running prepare command -> %s", args)
            status, output = subprocess.getstatusoutput(args)
            if status == 0:
                logger.debug(output[-27:])
                return True
            else:
                logger.error("One time FULL BACKUP PREPARE FAILED!")
                time.sleep(5)
                logger.error(output)
                return False

    ##############################################################################################################
    # PREPARE INC BACKUPS
############################################################################################################## def prepare_inc_full_backups(self): if self.check_inc_backups() == 0: logger.debug( "################################################################################################" ) logger.debug( "You have no Incremental backups. So will prepare only latest Full backup - - - - - - - - - - - #" ) logger.debug( "################################################################################################" ) time.sleep(3) self.prepare_only_full_backup() else: logger.debug( "####################################################################################################" ) logger.debug( "You have Incremental backups. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -#" ) time.sleep(3) if self.prepare_only_full_backup(): logger.debug( "####################################################################################################" ) logger.debug("Preparing Incs: ") time.sleep(3) list_of_dir = sorted(os.listdir(self.inc_dir)) for i in list_of_dir: if i != max(os.listdir(self.inc_dir)): logger.debug( "Preparing inc backups in sequence. 
inc backup dir/name is %s" % i) logger.debug( "####################################################################################################" ) time.sleep(3) # Check if decryption enabled if hasattr(self, 'decrypt'): decr = "%s --decrypt=%s --encrypt-key=%s --target-dir=%s/%s" % \ (self.backup_tool, self.decrypt, self.encrypt_key, self.inc_dir, i) logger.debug("Trying to decrypt backup") logger.debug("Running decrypt command -> %s", decr) status, output = subprocess.getstatusoutput(decr) if status == 0: logger.debug(output[-27:]) logger.debug("Decrypted!") else: logger.error("FULL BACKUP DECRYPT FAILED!") time.sleep(5) logger.error(output) # Check if decompression enabled, if it is, decompress backup prior prepare if hasattr(self, 'decompress'): decmp = "%s --decompress=%s --target-dir=%s/%s" % \ (self.backup_tool, self.decompress, self.inc_dir, i) logger.debug("Trying to decompress backup") logger.debug("Running decompress command -> %s", decmp) status, output = subprocess.getstatusoutput(decmp) if status == 0: logger.debug(output[-27:]) logger.debug("Decompressed") else: logger.error( "INCREMENTAL BACKUP DECOMPRESSION FAILED!") time.sleep(5) logger.error(output) args = '%s --prepare %s --target-dir=%s/%s --incremental-dir=%s/%s' % \ (self.backup_tool, self.xtrabck_prepare, self.full_dir, self.recent_full_backup_file(), self.inc_dir, i) logger.debug("Running prepare command -> %s", args) status, output = subprocess.getstatusoutput(args) if status == 0: logger.debug(output[-27:]) else: logger.error("Incremental BACKUP PREPARE FAILED!") time.sleep(5) logger.error(output) return False else: logger.debug( "####################################################################################################" ) logger.debug( "Preparing last incremental backup, inc backup dir/name is %s" % i) logger.debug( "####################################################################################################" ) time.sleep(3) # Check if decryption enabled if 
hasattr(self, 'decrypt'): decr = "%s --decrypt=%s --encrypt-key=%s --target-dir=%s/%s" % \ (self.backup_tool, self.decrypt, self.encrypt_key, self.inc_dir, i) logger.debug("Trying to decrypt backup") logger.debug("Running decrypt command -> %s", decr) status, output = subprocess.getstatusoutput(decr) if status == 0: logger.debug(output[-27:]) logger.debug("Decrypted!") else: logger.error("FULL BACKUP DECRYPT FAILED!") time.sleep(5) logger.error(output) # Check if decompression enabled, if it is, decompress backup prior prepare if hasattr(self, 'decompress'): decmp = "%s --decompress=%s --target-dir=%s/%s" % \ (self.backup_tool, self.decompress, self.inc_dir, i) logger.debug("Trying to decompress backup") logger.debug("Running decompress command -> %s", decmp) status, output = subprocess.getstatusoutput(decmp) if status == 0: logger.debug(output[-27:]) logger.debug("Decompressed") else: logger.error( "INCREMENTAL BACKUP DECOMPRESSION FAILED!") time.sleep(5) logger.error(output) args2 = '%s --prepare --target-dir=%s/%s --incremental-dir=%s/%s' % \ (self.backup_tool, self.full_dir, self.recent_full_backup_file(), self.inc_dir, i) logger.debug("Running prepare command -> %s", args2) status2, output2 = subprocess.getstatusoutput(args2) if status2 == 0: logger.debug(output2[-27:]) else: logger.error("Incremental BACKUP PREPARE FAILED!") time.sleep(5) logger.error(output2) return False logger.debug( "####################################################################################################" ) logger.debug( "The end of the Prepare Stage. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -#" ) logger.debug( "####################################################################################################" ) time.sleep(3) ############################################################################################################# # COPY-BACK PREPARED BACKUP ############################################################################################################# def shutdown_mysql(self): # Shut Down MySQL logger.debug( "####################################################################################################" ) logger.debug( "Shutting Down MySQL server: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -#" ) logger.debug( "####################################################################################################" ) time.sleep(3) if self.result == 3: args = self.systemd_stop_mariadb elif self.result == 4: args = self.stop_mysql elif self.result == 5: args = self.systemd_stop_mysql elif self.result == 6: args = self.stop_mysql status, output = subprocess.getstatusoutput(args) if status == 0: logger.debug(output) return True else: logger.deberrorug("Could not Shutdown MySQL!") logger.error("Refer to MySQL Error log file") logger.error(output) return False def move_datadir(self): # Move datadir to new directory logger.debug( "####################################################################################################" ) logger.debug( "Moving MySQL datadir to /tmp/mysql: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -#" ) logger.debug( "####################################################################################################" ) time.sleep(3) if os.path.isdir(self.tmpdir): rmdirc = 'rm -rf %s' % self.tmpdir status, output = subprocess.getstatusoutput(rmdirc) if status == 0: logger.debug("Emptied /tmp/mysql directory ...") try: shutil.move(self.datadir, self.tmp) logger.debug("Moved datadir to /tmp/mysql 
...") except shutil.Error as err: logger.error("Error occurred while moving datadir") logger.error(err) return False logger.debug("Creating an empty data directory ...") makedir = "mkdir %s" % (self.datadir) status2, output2 = subprocess.getstatusoutput(makedir) if status2 == 0: logger.debug("Datadir is Created! ...") else: logger.error("Error while creating datadir") logger.error(output2) return False return True else: logger.error("Could not delete /tmp/mysql directory") logger.error(output) return False else: try: shutil.move(self.datadir, self.tmp) logger.debug("Moved datadir to /tmp/mysql ...") except shutil.Error as err: logger.error("Error occurred while moving datadir") logger.error(err) return False logger.debug("Creating an empty data directory ...") makedir = "mkdir %s" % (self.datadir) status2, output2 = subprocess.getstatusoutput(makedir) if status2 == 0: logger.debug("Datadir is Created! ...") return True else: logger.error("Error while creating datadir") logger.error(output2) return False def run_xtra_copyback(self): # Running Xtrabackup with --copy-back option copy_back = '%s --copy-back --target-dir=%s/%s --datadir=%s' % \ (self.backup_tool, self.full_dir, self.recent_full_backup_file(), self.datadir) status, output = subprocess.getstatusoutput(copy_back) if status == 0: logger.debug( "####################################################################################################" ) logger.debug( "Data copied back successfully! 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #" ) logger.debug( "####################################################################################################" ) return True else: logger.error("Error occurred while copying back data!") logger.error(output) return False def giving_chown(self): # Changing owner of datadir to mysql:mysql time.sleep(3) give_chown = "%s %s" % (self.chown_command, self.datadir) status, output = subprocess.getstatusoutput(give_chown) if status == 0: logger.debug( "####################################################################################################" ) logger.debug( "New copied-back data now owned by specified user! - - - - - - - - - - - - - - - - - - - - - - - - - - -#" ) logger.debug( "####################################################################################################" ) return True else: logger.error("Error occurred while changing owner!") logger.error(output) return False def start_mysql_func(self): # Starting MySQL/Mariadb logger.debug( "####################################################################################################" ) logger.debug("Starting MySQL/MariaDB server: ") logger.debug( "####################################################################################################" ) time.sleep(3) if self.result == 3: args = self.systemd_start_mariadb elif self.result == 4: args = self.start_mysql elif self.result == 5: args = self.systemd_start_mysql elif self.result == 6: args = self.start_mysql start_command = args status, output = subprocess.getstatusoutput(start_command) if status == 0: logger.debug("Starting MySQL ...") logger.debug(output) return True else: logger.error("Error occurred while starting MySQL!") logger.error(output) return False def copy(self): logger.debug( "####################################################################################################" ) logger.debug( "Copying Back Already Prepared Final Backup: - - - - - - - 
- - - - - - - - - - - - - - - - - - - - -#" ) logger.debug( "####################################################################################################" ) time.sleep(3) if len(os.listdir(self.datadir)) > 0: logger.debug("MySQL Datadir is not empty!") return False else: if self.run_xtra_copyback(): if self.giving_chown(): if self.start_mysql_func(): return True else: "Error Occurred!" def copy_back(self): if self.shutdown_mysql(): if self.move_datadir(): if self.copy(): logger.debug( "####################################################################################################" ) logger.debug( "All data copied back successfully your MySQL server is UP again. \n" "Congratulations. \n" "Backups are life savers") logger.debug( "####################################################################################################" ) return True else: logger.error("Error Occurred!") ############################################################################################################## # FINAL FUNCTION FOR CALL: PREPARE/PREPARE AND COPY-BACK/COPY-BACK ############################################################################################################## def prepare_backup_and_copy_back(self): # Recovering/Copying Back Prepared Backup #print("#####################################################################################################") print( "+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-" ) print("") print("Preparing full/inc backups!") print("What do you want to do?") print( "1. Prepare Backups and keep for future usage. NOTE('Once Prepared Backups Can not be prepared Again')" ) print("2. Prepare Backups and restore/recover/copy-back immediately") print("3. 
Just copy-back previously prepared backups") prepare = int( input("Please Choose one of options and type 1 or 2 or 3: ")) print("") print( "+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-" ) time.sleep(3) #print("####################################################################################################") if prepare == 1: self.prepare_inc_full_backups() elif prepare == 2: self.prepare_inc_full_backups() self.copy_back() elif prepare == 3: self.copy_back() else: print("Please type 1 or 2 or 3 and nothing more!") # a = Prepare() # a.prepare_backup_and_copy_back()
def all_backup(self):
    """
    Decide which backup to take and take it.

    - No full backup yet: take the very first full backup.
    - Most recent full backup older than the configured interval:
      optionally archive old backups, then take a new full backup.
    - Otherwise: take an incremental backup based on the recent full one.

    NOTE: every successful path terminates the process with ``exit(0)``,
    mirroring the original control flow; callers rely on this.
    """
    # Workaround for circular import dependency error in Python:
    # create the CheckEnv object here instead of at module import time.
    check_env_obj = CheckEnv(self.conf)
    if not check_env_obj.check_all_env():
        # Environment checks failed; original did nothing in this case.
        return
    if self.recent_full_backup_file() == 0:
        logger.debug(
            "###############################################################")
        logger.debug(
            "#You have no backups : Taking very first Full Backup! - - - - #")
        logger.debug(
            "###############################################################")
        time.sleep(3)
        # Flushing Logs
        if self.mysql_connection_flush_logs():
            # Taking fullbackup
            if self.full_backup():
                # Removing old inc backups
                self.clean_inc_backup_dir()
                # Copying backups to remote server (optional settings)
                if hasattr(self, 'remote_conn') and hasattr(self, 'remote_dir') \
                        and self.remote_conn and self.remote_dir:
                    self.copy_backup_to_remote_host()
                # Exiting after taking full backup
                exit(0)
    elif self.last_full_backup_date() == 1:
        logger.debug(
            "################################################################")
        logger.debug(
            "Your full backup is timeout : Taking new Full Backup!- - - - - #")
        logger.debug(
            "################################################################")
        time.sleep(3)
        # Archiving backups.
        # BUG FIX: archive_dir is an optional config option; plain attribute
        # access raised AttributeError when it was absent. Guard with
        # hasattr(), consistent with every other optional option here.
        if hasattr(self, 'archive_dir') and self.archive_dir:
            if (hasattr(self, 'max_archive_duration') and self.max_archive_duration) \
                    or (hasattr(self, 'max_archive_size') and self.max_archive_size):
                self.clean_old_archives()
            if not self.create_backup_archives():
                exit(0)
        # Flushing logs
        if self.mysql_connection_flush_logs():
            # Taking fullbackup
            if self.full_backup():
                # Removing full backups
                self.clean_full_backup_dir()
                # Removing inc backups
                self.clean_inc_backup_dir()
                # Copying backups to remote server (optional settings)
                if hasattr(self, 'remote_conn') and hasattr(self, 'remote_dir') \
                        and self.remote_conn and self.remote_dir:
                    self.copy_backup_to_remote_host()
                # Exiting after taking NEW full backup
                exit(0)
    else:
        logger.debug(
            "################################################################")
        logger.debug(
            "You have a full backup that is less than %d seconds old. - -#",
            self.full_backup_interval)
        logger.debug(
            "We will take an incremental one based on recent Full Backup - -#")
        logger.debug(
            "################################################################")
        time.sleep(3)
        # Taking incremental backup
        self.inc_backup()
        # Copying backups to remote server (optional settings)
        if hasattr(self, 'remote_conn') and hasattr(self, 'remote_dir') \
                and self.remote_conn and self.remote_dir:
            self.copy_backup_to_remote_host()
        # Exiting after taking Incremental backup
        exit(0)
def _connection_args(self):
    """
    Build the MySQL connection flags (--socket or --host/--port) from
    optional config attributes.

    :return: flag string (leading space included), or None when neither
             mysql_socket nor mysql_host+mysql_port is configured.
    """
    if hasattr(self, 'mysql_socket'):
        return " --socket=%s" % (self.mysql_socket)
    if hasattr(self, 'mysql_host') and hasattr(self, 'mysql_port'):
        return " --host=%s --port=%s" % (self.mysql_host, self.mysql_port)
    return None

def _compression_args(self):
    """
    Build compression flags from optional config attributes.

    BUG FIX vs. original: one branch tested hasattr(self, 'compress-threads')
    (a hyphen is impossible in an attribute name, so the flag was never
    added) and spelled options with underscores; spellings unified to the
    hyphenated forms used by the other branch.
    """
    args = ""
    if hasattr(self, 'compress'):
        args += " --compress=%s" % (self.compress)
        if hasattr(self, 'compress_chunk_size'):
            args += " --compress-chunk-size=%s" % (self.compress_chunk_size)
        if hasattr(self, 'compress_threads'):
            args += " --compress-threads=%s" % (self.compress_threads)
    return args

def _encryption_args(self):
    """
    Build encryption flags from optional config attributes.

    Option spellings unified to the hyphenated forms (the original mixed
    --encrypt_key_file and --encrypt-key-file between its two branches).
    """
    args = ""
    if hasattr(self, 'encrypt'):
        args += " --encrypt=%s" % (self.encrypt)
        if hasattr(self, 'encrypt_key'):
            args += " --encrypt-key=%s" % (self.encrypt_key)
        if hasattr(self, 'encrypt_key_file'):
            args += " --encrypt-key-file=%s" % (self.encrypt_key_file)
        if hasattr(self, 'encrypt_threads'):
            args += " --encrypt-threads=%s" % (self.encrypt_threads)
        if hasattr(self, 'encrypt_chunk_size'):
            args += " --encrypt-chunk-size=%s" % (self.encrypt_chunk_size)
    return args

def _decrypt_checkpoints(self, base_dir):
    """
    Workaround for LP #1444255: decrypt xtrabackup_checkpoints of the base
    backup so the incremental backup can read it.

    :param base_dir: directory of the base (full or incremental) backup.
    :return: True on success, False otherwise.
    """
    logger.debug("Applying workaround for LP #1444255")
    xbcrypt_command = "%s -d -k %s -a %s -i %s/xtrabackup_checkpoints.xbcrypt " \
                      "-o %s/xtrabackup_checkpoints" % \
                      (self.xbcrypt, self.encrypt_key, self.encrypt,
                       base_dir, base_dir)
    logger.debug(
        "The following xbcrypt command will be executed %s", xbcrypt_command)
    status, output = subprocess.getstatusoutput(xbcrypt_command)
    if status == 0:
        logger.debug(output[-27:])
        return True
    logger.error("XBCRYPT COMMAND FAILED!")
    time.sleep(5)
    logger.error(output)
    return False

def inc_backup(self):
    """
    Take an incremental backup, based on the most recent full backup when
    no incremental exists yet, otherwise on the most recent incremental.

    The original duplicated the whole flag-building/command-running logic
    in both branches; only the base directory actually differed, so the
    branches are unified here.

    :return: True on success, False otherwise.
    """
    recent_bck = self.recent_full_backup_file()
    recent_inc = self.recent_inc_backup_file()
    # Consistency fix: every other call site constructs CheckEnv with
    # self.conf; the original called CheckEnv() here, ignoring a custom
    # config path.
    check_env_obj = CheckEnv(self.conf)
    product_type = check_env_obj.check_mysql_product()
    # Creating time-stamped incremental backup directory
    inc_backup_dir = self.create_backup_directory(self.inc_dir)
    # Base of this incremental: recent full backup if no incremental yet,
    # otherwise the most recent incremental.
    if recent_inc == 0:
        base_dir = "%s/%s" % (self.full_dir, recent_bck)
    else:
        base_dir = "%s/%s" % (self.inc_dir, recent_inc)
    # Why we check whether MariaDB or MySQL is installed:
    # see BUG -> https://bugs.launchpad.net/percona-xtrabackup/+bug/1444541
    # BUG FIX vs. original: the command templates had 6 %s placeholders but
    # 7 tuple items (the password placeholder was missing), a guaranteed
    # TypeError; the --password='%s' placeholder is restored.
    if product_type == 2:
        # MariaDB: --incremental-force-scan added as a BUG workaround.
        args = "%s --defaults-file=%s --user=%s --password='%s' " \
               "--incremental-force-scan --incremental %s --incremental-basedir %s" % \
               (self.backup_tool, self.mycnf, self.mysql_user,
                self.mysql_password, self.inc_dir, base_dir)
    elif product_type == 3:
        # MySQL/Percona Server.
        args = "%s --defaults-file=%s --user=%s --password='%s' " \
               "--target-dir=%s --incremental-basedir=%s --backup" % \
               (self.backup_tool, self.mycnf, self.mysql_user,
                self.mysql_password, inc_backup_dir, base_dir)
    else:
        # BUG FIX: original left `args` unbound (NameError) for any other
        # product type.
        logger.critical("Unknown MySQL product type; cannot take incremental backup!")
        return False
    conn_args = self._connection_args()
    if conn_args is None:
        logger.critical(
            "Neither mysql_socket nor mysql_host and mysql_port are defined in config!")
        return False
    args += conn_args
    # Adding compression and encryption support for incremental backup
    args += self._compression_args()
    args += self._encryption_args()
    if 'encrypt' in args:
        if not self._decrypt_checkpoints(base_dir):
            return False
    logger.debug("The following backup command will be executed %s", args)
    status, output = subprocess.getstatusoutput(args)
    if status == 0:
        logger.debug(output[-27:])
        return True
    logger.error("INCREMENT BACKUP FAILED!")
    time.sleep(5)
    logger.error(output)
    return False