def remove_files(objectives):
    """Delete *objectives* (a path or shell glob) with ``rm -rf``.

    Returns whatever ``SubprocessExecution.print_output`` returns for the
    executed command.  On failure the captured execution output (if any)
    is appended to the exception's args before re-raising.

    WARNING: the command string goes through a shell; *objectives* must
    come from trusted configuration, never from untrusted input.
    """
    # Bug fix: initialised up front so the except-block can no longer hit
    # a NameError when the failure happens before the command executed.
    execution_message = None
    try:
        delete_command = 'rm -rf ' + objectives
        execution_message = SubprocessExecution.main_execution_function(
            SubprocessExecution(), delete_command)
        return SubprocessExecution.print_output(SubprocessExecution(),
                                                execution_message)
    except Exception as e:
        e.args += (execution_message, )
        raise
def execute(self):
    """Execute the upload command template for every file in OBJECTIVES.

    Each regular file found directly inside ``self.__args.OBJECTIVES`` is
    substituted into ``UPLOAD_COMMAND_TEMPLATE`` and executed through
    SubprocessExecution, retrying up to 5 times per file with a growing
    back-off between failed attempts.
    """
    from execution.subprocess_execution import SubprocessExecution
    files_to_upload = [
        f for f in listdir(self.__args.OBJECTIVES)
        if isfile(join(self.__args.OBJECTIVES, f))
    ]
    # Not very elegant, change later.
    if self.__args.DESTINATION == 'local':
        FilesystemHandling.create_directory(
            self.__custom_command_dict['LOCAL_BACKUP'])
    # Loop through files in "objectives".
    for file_to_upload in files_to_upload:
        self.__custom_command_dict["file"] = file_to_upload
        count = 1
        time_retry = 60
        execution_message = []
        while count <= 5:
            print('Trying upload attempt number: ' + str(count))
            try:
                command = self.__args.UPLOAD_COMMAND_TEMPLATE \
                    % self.__custom_command_dict
            except Exception as e:
                print("Check your ARGS_DICT parameter.")
                print("The upload string was :")
                print(self.__args.UPLOAD_COMMAND_TEMPLATE)
                print("If you are using default templates check the templates file at:")
                print("%s/%s" % (self.__args.HOME_FOLDER,
                                 self.__args.DEFAULT_TEMPLATE_FILE))
                print(e)
                exit(1)
            print("Executing external command: %s " % command)
            tmp_execution_message = SubprocessExecution.main_execution_function(
                SubprocessExecution(), command)
            # Bug fix: the append used to sit at the end of the loop body,
            # so a successful attempt's output was never recorded (the
            # `break` skipped it).
            execution_message.append(tmp_execution_message)
            if tmp_execution_message[0] == 0:
                # Bug fix: the counter used to be incremented before this
                # message, reporting the wrong attempt number.
                print('Upload attempt ' + str(count) + ' successful.')
                break
            print('Upload attempt number: ' + str(count)
                  + ' FAILED for: ' + command)
            # Bug fix: StdOut and StdErr both used to print element [0].
            print('StdOut: ' + str(tmp_execution_message[1]))
            print('StdErr: ' + str(tmp_execution_message[2]))
            count = count + 1
            time_retry = time_retry * count
            if count <= 5:
                # Bug fix: no pointless sleep after the final attempt.
                print('We will wait for: ' + str(time_retry / 60)
                      + ' minute(s) before upload attempt number: '
                      + str(count))
                time.sleep(time_retry)
def encrypt(self, in_file, out_file, password, key_length=32,
            python_version='2.7', home_folder=''):
    """Encrypt *in_file* into *out_file*.

    On python_version '2.7' an OpenSSL-compatible AES-CBC stream is
    written ('Salted__' + 8-byte salt header, PKCS#7-style padding);
    *password* is the path to a key file whose contents (newlines
    stripped) become the passphrase.  On '2.6' gpg2 is shelled out to
    instead.  Returns ``out_file.name`` on success.
    """
    # Bug fix: the key-file path used to be printed here, leaking
    # secret-handling details to stdout.
    if python_version == '2.7':
        with open(password, 'r') as key_file:
            password = key_file.read().replace('\n', '')
        bs = AES.block_size
        # 8 random salt bytes: block size minus the 'Salted__' magic.
        salt = Random.new().read(bs - len('Salted__'))
        key, iv = self.__derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        out_file.write('Salted__' + salt)
        finished = False
        while not finished:
            chunk = in_file.read(1024 * bs)
            if len(chunk) == 0 or len(chunk) % bs != 0:
                # Final (possibly empty) chunk: pad to a full block.
                padding_length = bs - (len(chunk) % bs)
                chunk += padding_length * chr(padding_length)
                finished = True
            out_file.write(cipher.encrypt(chunk))
        # Bug fix: mirror the 2.6 branch and report the output file.
        return out_file.name
    elif python_version == '2.6':
        command_encrypt = 'cat ' + password + ' ' + in_file.name + \
            ' | /usr/bin/gpg-agent --daemon gpg2 --cipher-algo AES-128 --batch --yes --no-tty ' \
            '--quiet -c --passphrase-fd 0 > ' + out_file.name
        execution_encryption = SubprocessExecution.main_execution_function(
            SubprocessExecution(), command_encrypt, True)
        if execution_encryption[0] == 0:
            return out_file.name
def decrypt(self, in_file, out_file, password, key_length=32,
            home_folder='', python_version='2.7'):
    """Decrypt *in_file* into *out_file*.

    Bug fix: ``python_version`` used to be read without ever being
    defined (NameError at runtime).  It is now a keyword parameter
    appended AFTER ``home_folder`` so existing positional callers
    (in_file, out_file, password, key_length, home_folder) keep working.

    On '2.7' the OpenSSL-style salted AES-CBC stream produced by
    encrypt() is decoded; on '2.6' gpg2 is shelled out to.
    Returns ``out_file.name`` on success; raises ValueError on a bad pad.
    """
    if python_version == '2.7':
        bs = AES.block_size
        # Skip the 'Salted__' magic, keep only the salt bytes.
        salt = in_file.read(bs)[len('Salted__'):]
        key, iv = self.__derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        next_chunk = ''
        finished = False
        while not finished:
            # One-chunk lookahead so the final chunk can be un-padded.
            chunk, next_chunk = next_chunk, cipher.decrypt(
                in_file.read(1024 * bs))
            if len(next_chunk) == 0:
                padding_length = ord(chunk[-1])
                if padding_length < 1 or padding_length > bs:
                    raise ValueError("bad decrypt pad (%d)" % padding_length)
                # all the pad-bytes must be the same
                if chunk[-padding_length:] != (padding_length * chr(padding_length)):
                    # this is similar to the bad decrypt:evp_enc.c from openssl program
                    raise ValueError("bad decrypt")
                chunk = chunk[:-padding_length]
                finished = True
            out_file.write(chunk)
        return out_file.name
    elif python_version == '2.6':
        command_decrypt = 'echo ' + password + ' ' \
            '| gpg-agent --quiet --daemon gpg2 --batch --yes -d --passphrase-fd 0 -o ' \
            + out_file.name + ' ' \
            + in_file.name
        execution_decryption = SubprocessExecution.main_execution_function(
            SubprocessExecution(), command_decrypt, True)
        if execution_decryption[0] == 0:
            return out_file.name
def compression_execution(self, objectives, destination):
    """Tar-and-compress the whitespace-separated *objectives* paths into
    ``<destination>/filesbackup_<timestamp>.tar.gz``.

    Returns the execution tuple from SubprocessExecution.
    """
    stamp = time.strftime("%Y%m%d_%H%M%S")
    command = '%s %s/filesbackup_%s.tar.gz ' % (self.__tar_program,
                                                destination, stamp)
    for target in objectives.split():
        if target != '' and target is not None:
            # Strip the leading character (the absolute-path slash) so
            # tar stores relative member names.
            command += ' ' + target.replace(' /', ' ')[1:]
    if not os.path.isdir(destination):
        SubprocessExecution.main_execution_function(
            SubprocessExecution(), 'mkdir ' + destination)
    print(command)
    return SubprocessExecution.main_execution_function(
        SubprocessExecution(), command)
def __init__(self):
    """Parse CLI parameters, resolve binary/path defaults and make sure
    the backup destination directory exists."""
    self.args_list = self.__get_parameters()
    # Fall back to hard-coded defaults for every optional parameter.
    if self.args_list.PREFIX_FOLDER:
        self.script_prefix = self.args_list.PREFIX_FOLDER
    else:
        self.script_prefix = "mydump"
    if self.args_list.MYSQL_DUMP_BINARY:
        self.mysql_dump_binary = self.args_list.MYSQL_DUMP_BINARY
    else:
        self.mysql_dump_binary = "/usr/bin/mysqldump"
    if self.args_list.MYSQL_BINARY:
        self.MYSQL = self.args_list.MYSQL_BINARY
    else:
        self.MYSQL = "/usr/bin/mysql"
    if self.args_list.TAR_COMMAND:
        self.tar_command = self.args_list.TAR_COMMAND
    else:
        self.tar_command = 'sudo /bin/tar czf'
    # HOME_FOLDER must be on sys.path BEFORE the project-local imports
    # below — do not reorder.
    sys.path.append(self.args_list.HOME_FOLDER)
    from execution.config_parser import ConfigParser
    if not ConfigParser.check_exists(ConfigParser(),
                                     self.args_list.MY_INSTANCES):
        self.args_list.MY_INSTANCES = '3306'
    if not ConfigParser.check_exists(ConfigParser(),
                                     self.args_list.DESTINATION):
        self.args_list.DESTINATION = '/opt/backup'
    self.DESTINATION = self.args_list.DESTINATION + '/' + self.script_prefix
    # e.g. "20240101_myhost" — date prefix for the backup artefacts.
    self.PREFIX_BACKUP = time.strftime('%Y%m%d', time.localtime(
        time.time())) + "_" + self.args_list.HOSTNAME
    if self.DESTINATION:
        sys.path.append(self.args_list.HOME_FOLDER)
        # from compression.zip_compression import ZipCompression
        from execution.subprocess_execution import SubprocessExecution
        # NOTE(review): plain `mkdir` (no -p) — creation fails if the
        # parent of DESTINATION does not exist; only a message is
        # printed, execution continues regardless.
        if not os.path.isdir(self.DESTINATION):
            create_dir_cmd = 'mkdir ' + self.DESTINATION
            execution_mkdir = SubprocessExecution.main_execution_function(
                SubprocessExecution(), create_dir_cmd, True)
            if execution_mkdir[0] != 0:
                print 'Could Not create directory with command: ' + create_dir_cmd
                print 'Error code: ' + str(execution_mkdir[0])
def file_backup_execution(self, filesets, destination='',
                          excluded_filesets='', tar_command=''):
    """Tar (Linux) or zip (Windows) the given filesets into *destination*.

    *filesets* / *excluded_filesets* are whitespace-separated absolute
    paths (each starting with '/'); the leading slash is stripped so tar
    does not emit 'Removing leading /' noise.  Exits the process on
    fatal errors (empty fileset, running as root, tar failure).
    """
    print('Making a compressed copy of the local files to: ' + destination)
    if excluded_filesets:
        print('Backup objective(s): ' + filesets + '. Excluded files: '
              + excluded_filesets)
    else:
        print('Backup objective(s): ' + filesets + '. No files Excluded.')
    # Excluded filesets: turn " /a /b" into " --exclude=a --exclude b".
    if excluded_filesets != '' and excluded_filesets is not None:
        excluded_files = ' --exclude=' + excluded_filesets[1:]
        excluded_files = excluded_files.replace('/ ', ' ')
        excluded_files = excluded_files.replace(' /', ' --exclude ')
    else:
        excluded_files = ''
    datetime_string = time.strftime("%Y%m%d_%H%M%S")
    os_name = OSInformation.isWindows()
    execution_message = 'Error'
    if os_name:
        # Windows: zip the filesets instead of calling tar.
        if not os.path.isdir(destination + '\\files'):
            create_dir_cmd = 'mkdir ' + destination + '\\files'
            execution_mkdir = SubprocessExecution.main_execution_function(
                SubprocessExecution(), create_dir_cmd, True)
            print(execution_mkdir)
        result_file_name = (destination + '\\files\\compressed\\filebackup_'
                            + datetime_string)
        filesets = filesets.split()
        ZipCompression(result_file_name + '.zip', filesets)
    else:
        if filesets != '' and filesets is not None:
            filesets = filesets.replace(' /', ' ')
            filesets = filesets[1:]
        else:
            # Bug fix: this used to be `print sys.stderr.write(...)`,
            # which also printed the write() return value (None).
            sys.stderr.write(
                'ERROR: The --FILESET_INCLUDE can not be empty; execution')
            sys.exit(1)
        if os.geteuid() == 0:
            sys.stderr.write(
                'Execution as root is not allowed the GID for this user can not be 0'
            )
            exit(1)
        else:
            tar_command = (tar_command + ' ' + destination
                           + '/files/filesbackup_' + datetime_string
                           + '.tar.gz ' + filesets + excluded_files)
            if not os.path.isdir(destination + '/files'):
                create_dir_cmd = 'mkdir ' + destination + '/files'
                execution_mkdir = SubprocessExecution.main_execution_function(
                    SubprocessExecution(), create_dir_cmd, True)
                if execution_mkdir[0] != 0:
                    print('Could Not create directory with command: '
                          + create_dir_cmd)
                    print('Error code: ' + str(execution_mkdir[0]))
            execution_message = SubprocessExecution.main_execution_function(
                SubprocessExecution(), tar_command, True)
            # tar exits with 1 if the file changed during the time we read it. Ignoring
            # https://www.gnu.org/software/tar/manual/html_section/tar_19.html#Synopsis
            print(execution_message)
            if execution_message[0] not in [0, 1]:
                print('Executing the tar command: ' + tar_command)
                # Bug fix: message typo "nor zero" corrected.
                print('Returned non zero exit code: '
                      + str(execution_message[0]) + ', '
                      + str(execution_message[1]) + ', '
                      + str(execution_message[2]))
                exit(1)
            else:
                print('Successful execution: ' + str(execution_message[0])
                      + ', ' + str(execution_message[1]) + ', '
                      + str(execution_message[2]))
def create_directory(destination):
    """Create *destination* through a shell ``mkdir`` unless it already
    exists.  The subprocess result is intentionally discarded, matching
    the original best-effort behaviour."""
    if isdir(destination):
        return
    SubprocessExecution.main_execution_function(
        SubprocessExecution(), 'mkdir ' + destination)
logger) reporter.send_post_report() else: logger.info('No report(s) enabled in configuration.') elif type(json_dict) is str: logger.critical('Execution Error with: ' + json_dict + command_object.config) else: logger.critical('Execution Error with: ' + command_object.config) # provissional logging feature from execution.subprocess_execution import SubprocessExecution log = json_dict['GENERAL']['LOG_FOLDER'] log1 = json_dict['GENERAL']['LOG_FOLDER'] + '1' log2 = json_dict['GENERAL']['LOG_FOLDER'] + '2' if os.path.exists(log1): command_rotatelogs = 'mv ' + log1 + ' ' + log2 execution_rotation_result = SubprocessExecution.main_execution_function( SubprocessExecution(), command_rotatelogs, True) command_rotatelogs = 'mv ' + log + ' ' + log1 execution_rotation_result = SubprocessExecution.main_execution_function( SubprocessExecution(), command_rotatelogs, True) # End of execution logger.info('Execution ends here.') logger = logging.getLogger('ncbackup')
def works_execution(self):
    """Run mongodump into DESTINATION/PREFIX_FOLDER/dump_<timestamp>,
    tar the dump directory and remove the uncompressed copy.

    Returns a ``(code, stdout, stderr)``-style tuple on failure, or the
    string 'Execution finished successfully' on success (unchanged
    contract).
    """
    # VALIDATIONS PENDING
    if not self.__parameters_dict['DESTINATION'] \
            or not ConfigParser.is_existing_abs_path(
                ConfigParser(), self.__parameters_dict['DESTINATION']):
        destination_not_found = ('Mongo script needs a DESTINATION folder: '
                                 + self.__parameters_dict['DESTINATION']
                                 + ' can not be found.')
        print(destination_not_found)
        # Bug fix: the logger object was called directly (TypeError).
        self.__logger.critical(destination_not_found)
        return 1, '', destination_not_found
    dir_mongo_backup = (self.__parameters_dict['DESTINATION'] + '/'
                        + self.__parameters_dict['PREFIX_FOLDER'])
    # Bug fix: pre-initialised so the debug print below can never hit a
    # NameError when both directories already exist.
    result_mkdir_mongo_backup = None
    if not ConfigParser.is_existing_abs_path(ConfigParser(),
                                             dir_mongo_backup):
        # Bug fix: the logger used to be passed in the print-flag
        # position; every other call site uses (cmd, True, logger).
        result_mkdir_mongo_backup = SubprocessExecution.main_execution_function(
            SubprocessExecution(), 'mkdir ' + dir_mongo_backup, True,
            self.__logger)
        self.__logger.info(result_mkdir_mongo_backup)
        if result_mkdir_mongo_backup[0] is not None \
                and result_mkdir_mongo_backup[0] != 0:
            message_mkdir = ('Could not create directory ' + dir_mongo_backup
                             + ' if the software can not create mongo backup FAILS')
            self.__logger.critical(message_mkdir)
            print(message_mkdir)
            self.__result_mongo_dump_execution = 1, '', message_mkdir
            return 1, '', message_mkdir
    datetime_string = time.strftime("%Y%m%d_%H%M%S")
    dump_dir = dir_mongo_backup + '/dump_' + datetime_string
    if not ConfigParser.is_existing_abs_path(ConfigParser(), dump_dir):
        result_mkdir_mongo_backup = SubprocessExecution.main_execution_function(
            SubprocessExecution(), 'mkdir ' + dump_dir, True, self.__logger)
        self.__logger.info(result_mkdir_mongo_backup)
        if result_mkdir_mongo_backup[0] is not None \
                and result_mkdir_mongo_backup[0] != 0:
            message_mkdir = ('Could not create directory ' + dump_dir
                             + ' if the software can not create this folder mongo backup FAILS')
            self.__logger.critical(message_mkdir)
            print(message_mkdir)
            self.__result_mongo_dump_execution = 1, '', message_mkdir
            return 1, '', message_mkdir
    print(result_mkdir_mongo_backup)
    mongo_dump_command = self.__parameters_dict['MONGODUMP_BIN']
    if type(self.__parameters_dict['MONGO_HOST']) is str:
        mongo_host = self.__parameters_dict['MONGO_HOST']
    else:
        mongo_host = '127.0.0.1'
    mongo_dump_command += ' -h ' + mongo_host + ' -o ' + dump_dir
    if self.__parameters_dict.get('MONGO_USER') \
            and self.__parameters_dict['MONGO_USER'] != '' \
            and self.__parameters_dict['MONGO_USER'] is not None:
        mongo_dump_command += ' --username ' + self.__parameters_dict[
            'MONGO_USER']
    if self.__parameters_dict.get('MONGO_PWD') \
            and self.__parameters_dict['MONGO_PWD'] != '' \
            and self.__parameters_dict['MONGO_PWD'] is not None:
        mongo_dump_command += ' --password ' + self.__parameters_dict[
            'MONGO_PWD']
    result_mongo_dump_execution = SubprocessExecution.main_execution_function(
        SubprocessExecution(), mongo_dump_command, True, self.__logger)
    if result_mongo_dump_execution[0] != 0 \
            and result_mongo_dump_execution[0] is not None:
        self.__logger.warning('MongoDB backup failed')
        self.__logger.warning('Error code: '
                              + str(result_mongo_dump_execution[0]))
        self.__logger.warning('StdOut: '
                              + str(result_mongo_dump_execution[1]))
        self.__logger.warning(str(result_mongo_dump_execution[2]))
        self.__result_mongo_dump_execution = result_mongo_dump_execution
        return result_mongo_dump_execution
    compress_mongo_files_dir = (self.__parameters_dict['TAR_COMMAND'] + ' '
                                + dump_dir + '.tar.gz' + ' ' + dump_dir)
    # Bug fix: both calls below used to pass the logger where the
    # print-flag boolean belongs.
    SubprocessExecution.main_execution_function(
        SubprocessExecution(), compress_mongo_files_dir, True, self.__logger)
    SubprocessExecution.main_execution_function(
        SubprocessExecution(), 'rm -rf ' + dump_dir, True, self.__logger)
    self.__result_mongo_dump_execution = result_mongo_dump_execution
    return 'Execution finished successfully'
def cat_files(path_to_file):
    """Concatenate every ``<path_to_file>.*`` split chunk back into a
    single ``path_to_file`` via the shell, returning the execution
    tuple from SubprocessExecution."""
    join_command = 'cat %s.* > %s' % (path_to_file, path_to_file)
    print(join_command)
    return SubprocessExecution.main_execution_function(
        SubprocessExecution(), join_command)
def split_file(self, path_to_file, chunk_size):
    """Split *path_to_file* into *chunk_size*-MB pieces via the shell
    ``split`` utility, using the original path as the piece prefix.

    To be deprecated in favor of split_binary_file.
    Returns the execution tuple from SubprocessExecution (bug fix: the
    result used to be computed and then dropped, always returning None).
    """
    print(path_to_file)
    command_split = ('split --bytes=' + chunk_size + 'M '
                     + path_to_file + ' ' + path_to_file)
    print(command_split)
    return SubprocessExecution.main_execution_function(
        SubprocessExecution(), command_split, True)
remove_objectives(encryption_command.OBJECTIVES, encryption_command.REMOVE_OBJECTIVES) # Decryption elif encryption_command.DECRYPT == '-d' or encryption_command.DECRYPT is True: print 'You have chosen to decrypt with -d option' if encryption_command.OBJECTIVES and encryption_command.DESTINATION: datetime_string = time.strftime("%Y%m%d_%H%M%S") if not encryption_command.OBJECTIVES.endswith('000'): cat_execution_result = EncryptionWorks.cat_files(encryption_command.OBJECTIVES) if cat_execution_result[0] != 0: print 'Error:Cat retuned a non zero exit code.' exit(1) with open(encryption_command.OBJECTIVES, 'rb') as in_file: with open(encryption_command.DESTINATION, 'wb') as out_file: with open(encryption_command.KEY_FILE, 'r') as key_file: key_from_file = key_file.read().replace('\n', '') EncryptionWorks.decrypt(EncryptionWorks(), in_file, out_file, key_from_file, 32, encryption_command.HOME_FOLDER) else: result_name = encryption_command.OBJECTIVES.replace('.000', '') cat_execution_result = SubprocessExecution.main_execution_function(SubprocessExecution(), 'mv ' + encryption_command.OBJECTIVES + ' ' + result_name) with open(result_name, 'rb') as in_file: with open(encryption_command.DESTINATION, 'wb') as out_file: with open(encryption_command.KEY_FILE, 'r') as key_file: key_from_file =key_file.read().replace('\n', '') EncryptionWorks.decrypt(EncryptionWorks(), in_file, out_file, key_from_file, 32, encryption_command.HOME_FOLDER)
def works_execution(self):
    """Dump every PostgreSQL database (except excluded ones) plus the
    roles, gzip-compressed, into DESTINATION/PREFIX_FOLDER/.

    Sets ``self.__execution_result`` to False on any failure and returns
    an error string; returns None on success (unchanged contract).
    """
    self.__execution_result = True
    if self.__parameters_dict['DESTINATION'] \
            and self.__parameters_dict['PREFIX_FOLDER']:
        save_dir = (self.__parameters_dict['DESTINATION'] + '/'
                    + self.__parameters_dict['PREFIX_FOLDER'] + '/')
        if not os.path.isdir(save_dir):
            SubprocessExecution.main_execution_function(
                SubprocessExecution(), 'mkdir ' + save_dir, self.__logger)
        # template0/template1 can not be dumped; '|' filters the psql
        # column-separator tokens out of the listing parsed below.
        # Bug fix: the condition used to be `is not None or == ''`,
        # which only worked by accident; simplified to the equivalent
        # `is not None`.
        if self.__parameters_dict['EXCLUDE_DB'] is not None:
            exclude_db = self.__parameters_dict['EXCLUDE_DB'].split()
            exclude_db.append('template0')
            exclude_db.append('template1')
            exclude_db.append('|')
        else:
            exclude_db = ['template0', 'template1', '|']
        # 'psql -l' produces a list of PostgreSQL databases.
        get_list = SubprocessExecution.main_execution_function(
            SubprocessExecution(), 'psql -l', True, self.__logger)
        if get_list[0] == 0:
            get_list = get_list[1].split('\n')
        else:
            self.__execution_result = False
            self.__logger.critical('Error code: ' + str(get_list[0]))
            self.__logger.critical('StdOut: ' + get_list[1])
            self.__logger.critical('StdErr: ' + get_list[2])
            # Bug fix: message typo "faile" corrected.
            return 'Execution failed due to error listing postgres DBs'
        # Exclude header and footer lines of the psql listing.
        db_list = get_list[3:-3]
        # Extract database names from the first column of each row and
        # pipe each dump through gzip.
        for n in db_list:
            # Bug fix: py2-only `string.split(n)` replaced with the
            # portable str method.
            n_db = n.split()[0]
            if n_db in exclude_db:
                continue
            print(n_db)
            dump_cmd = ('pg_dump ' + n_db + ' | gzip -c > '
                        + save_dir + n_db + '.gz')
            result_dump = SubprocessExecution.main_execution_function(
                SubprocessExecution(), dump_cmd, True, self.__logger)
            if result_dump[0] != 0:
                self.__execution_result = False
                self.__logger.critical('Error code: ' + str(result_dump[0]))
                self.__logger.critical('StdOut: ' + result_dump[1])
                self.__logger.critical('StdErr: ' + result_dump[2])
                return 'Execution failed while executing: ' + dump_cmd
        # Dump the DB roles as well.
        # https://www.postgresql.org/docs/8.1/static/app-pg-dumpall.html
        # option: --globals-only (pre 8.3); 8.3 added --roles-only:
        # https://www.postgresql.org/docs/8.3/static/app-pg-dumpall.html
        # Need to add condition for older versions of postgres.
        result_dump = SubprocessExecution.main_execution_function(
            SubprocessExecution(),
            'pg_dumpall -r | gzip -c > ' + save_dir + 'roles' + '.gz',
            True, self.__logger)
        print(result_dump)
    else:
        self.__execution_result = False
def works_execution(self):
    """Run every configured rsync command.

    Returns the execution tuple of the LAST command only (unchanged
    contract); earlier results are visible only through the logger.
    Bug fix: the return value is pre-initialised so an empty command set
    no longer raises NameError.
    """
    rsync_commands = self.iterate_works('ORIGIN_AND_TARGETS_PARAMS')
    execution_output = None
    for key in rsync_commands:
        execution_output = SubprocessExecution.main_execution_function(
            SubprocessExecution(), rsync_commands[key], True, self.__logger)
    return execution_output