def generate(backup_type, storages, path_to_file):
    """Generate a job configuration file.

    Args:
        backup_type: one-element list holding the backup type (argparse
            delivers it this way because of ``nargs=1``).
        storages: iterable of storage names; a rendered storage template is
            appended to the generated file for each one.
        path_to_file: one-element list holding the destination path.

    Exits the process with status 1 if the destination file cannot be
    opened for appending.
    """
    # argparse nargs=1 wraps these in single-element lists.
    backup_type = backup_type[0]
    path_to_file = path_to_file[0]

    template_path = f'{TEMPLATES_DIR}/backup_type/{backup_type}.conf'

    # For absolute destinations make sure the parent directory exists first.
    if path_to_file.startswith('/'):
        general_function.create_dirs(
            job_name=backup_type,
            dirs_pairs={os.path.dirname(path_to_file): ''})

    general_function.copy_ofs(template_path, path_to_file)

    try:
        fd = open(path_to_file, 'a')
    except (OSError, PermissionError, FileNotFoundError) as e:
        message_info = f"Couldn't open file {path_to_file}:{e}!"
        general_function.print_info(message_info)
        sys.exit(1)

    # Map the concrete backup type onto the generic job type used inside
    # the storage templates.
    if backup_type in config.supported_db_backup_type:
        job_type = 'databases'
    elif backup_type in config.supported_file_backup_type:
        job_type = 'files'
    else:
        job_type = 'external'

    # BUGFIX: use the descriptor as a context manager so it is closed even
    # when a storage template is missing/unreadable (previously it leaked
    # on any exception inside the loop).
    with fd:
        for storage in storages:
            storage_template_path = f'{TEMPLATES_DIR}/storages/{storage}.conf'
            with open(storage_template_path, 'r', encoding='utf-8') as f:
                str_storage = f.read()

            str_storage = str_storage.replace('backup_type', backup_type)
            str_storage = str_storage.replace('job_type', job_type)

            if backup_type == 'inc_files':
                str_storage = str_storage.replace('inc_files/dump', 'inc')
                # Incremental jobs carry no retention settings: drop the
                # empty 'store' section from the template.
                str_storage = re.sub(
                    r"[ ]*store:[\s]*days: ''[\s]*weeks: ''[\s]*month: ''[\s]*",
                    '', str_storage)

            if backup_type == 'desc_files':
                str_storage = str_storage.replace('desc_files/dump', 'desc/dump')

            if backup_type == 'external':
                str_storage = str_storage.replace('external/dump', 'dump')

            fd.write(str_storage)

    # Generated config may contain credentials: owner-only permissions.
    os.chmod(path_to_file, 0o600)

    general_function.print_info(
        f"Successfully generated '{path_to_file}' configuration file!")
def writelog(log_level, log_message, fd, type_message=''):
    """Record an event in the log file and in the in-memory logs.

    Args:
        log_level: event level ('ERROR', 'INFO', 'WARNING').
        log_message: message text.
        fd: open file object of the log file.
        type_message: configuration-file section the event belongs to
            (empty string when not tied to a specific job section).
    """
    entry = get_log(log_level, log_message, type_message)

    # Best effort: a failed write must not abort the backup run.
    try:
        fd.write(entry)
        fd.flush()
    except (OSError, PermissionError, FileNotFoundError) as err:
        general_function.print_info("Couldn't write to log file:%s" % (err))

    # Every entry goes into the debug log; errors additionally go into the
    # error log and, when section-scoped, the per-job error log.
    config.debug_log += entry
    if log_level == 'ERROR':
        config.error_log += entry
        if type_message:
            config.jobs_error_log[type_message].append(entry)
def test_config(path_to_config):
    """Syntax-check the configuration file and report the verdict.

    Always terminates the process (the ``finally`` exit also fires when an
    unexpected exception escapes the parse).
    """
    try:
        specific_function.get_parsed_string(path_to_config)
    except general_function.MyError as err:
        verdict = "The configuration file '%s' syntax is bad: %s! " % (path_to_config, err)
        general_function.print_info(verdict)
    else:
        verdict = "The configuration file '%s' syntax is ok!" % (path_to_config)
        general_function.print_info(verdict)
    finally:
        sys.exit()
def get_parser():
    """Build and return the command-line parser for the script.

    Lazily resolves the global VERSION from the version module on first
    call, falling back to 'unknown' when the attribute is missing.
    """
    global VERSION
    if not VERSION:
        try:
            VERSION = version.VERSION
        except AttributeError as err:
            general_function.print_info('Can\'t get version from file version.py: %s' % (err))
            VERSION = 'unknown'

    # Reusable parent parsers: version flag and config-path option.
    version_options = argparse.ArgumentParser(add_help=False)
    version_options.add_argument('-v', '--version', action='version',
                                 version=VERSION)

    config_options = argparse.ArgumentParser(add_help=False)
    config_options.add_argument('-c', '--config', dest='path_to_config',
                                type=str, action='store',
                                help='path to config',
                                default=r'/etc/nxs-backup/nxs-backup.conf')

    # Top-level parser.
    root = argparse.ArgumentParser(
        parents=[config_options, version_options],
        description='Make to backups with %(prog)s',
        usage='%(prog)s [arguments]')

    root.add_argument('-t', '--test', dest='test_conf', action='store_true',
                      help="Check the syntax of the configuration file.", )

    commands = root.add_subparsers(dest='cmd', help='List of commands')

    # 'start' runs one job (or the 'all' pseudo-job by default).
    start_cmd = commands.add_parser(
        'start', parents=[config_options],
        help='Start backup script for one of the job in config file.')
    start_cmd.add_argument('jobs_name', type=str,
                           help='One of the active job\'s name.',
                           nargs='?', default='all')

    # 'generate' writes a fresh job configuration file.
    generate_cmd = commands.add_parser(
        'generate', help='Generate backup\'s config file.')
    generate_cmd.add_argument('-T', '--type', dest='backup_type', type=str,
                              help='One of the type backup.', nargs=1,
                              choices=config.supported_backup_type,
                              required=True)
    generate_cmd.add_argument('-S', '--storages', dest='storages', type=str,
                              help='One or more storages.', nargs='+',
                              choices=config.supported_storages,
                              required=True)
    generate_cmd.add_argument('-P', '--path', dest='path_to_generate_file',
                              type=str, help='Path to generate config file.',
                              nargs=1, required=True)

    return root
def get_parsed_string(path_to_config):
    """Open and YAML-parse the configuration file at *path_to_config*.

    Returns the parsed document. YAML errors — and the recursion-depth
    RuntimeError that a circular 'include' value produces — are re-raised
    as general_function.MyError. A missing or unreadable file terminates
    the process with status 1.
    """
    try:
        with open(path_to_config, 'r') as stream:
            try:
                parsed = yaml.load(stream, Loader=Loader)
            except yaml.YAMLError as exc:
                raise general_function.MyError(str(exc))
            except RuntimeError as exc:
                # Hitting the recursion limit here points at a bad/circular
                # include value in the config.
                recursion_hit = "maximum recursion depth exceeded while calling" in str(exc)
                detail = f" error in include value - '{exc}'" if recursion_hit else str(exc)
                raise general_function.MyError(detail)
    except (FileNotFoundError, PermissionError):
        general_function.print_info(
            f"No such file '{path_to_config}' or permission denied!")
        sys.exit(1)
    else:
        return parsed
def get_conf_value(parsed_str):
    """Populate module-level configuration globals from the parsed config.

    Takes the parsed configuration document, fills the module globals
    (strings/regexes of supported values, mail/log/smtp/loop settings),
    and splits the configured jobs by kind.

    Returns:
        tuple ``(db_job_dict, file_job_dict, external_job_dict)`` mapping
        job name -> job data for database, file and external backup jobs.

    Exits with status 1 on a duplicate job name or an empty 'admin_mail'.
    """
    global all_jobs_name
    global general_str
    global regular_str
    global regular_str_for_backup_type
    global general_str_for_backup_type
    global general_str_for_backup_type_db
    global general_str_for_backup_type_files
    global general_str_for_backup_type_external
    global regular_str_for_storage
    global general_str_for_storage
    global log_file
    global admin_mail
    global client_mail
    global level_message
    global mail_from
    global server_name
    global block_io_write
    global block_io_read
    global block_io_weight
    global general_path_to_all_tmp_dir
    global cpu_shares
    global supported_general_job
    global smtp_server
    global smtp_port
    global smtp_ssl
    global smtp_user
    global smtp_password
    global smtp_timeout
    global smtp_tls
    global loop_timeout
    global loop_interval

    general_str_for_backup_type_db = ', '.join(supported_db_backup_type)
    general_str_for_backup_type_files = ', '.join(supported_file_backup_type)
    general_str_for_backup_type_external = ', '.join(
        supported_external_backup_type)

    # '^a$|^b$|...' regexes that match exactly one supported value.
    regular_str_for_backup_type = '|'.join(
        f'^{item}$' for item in supported_backup_type)
    general_str_for_backup_type = ', '.join(supported_backup_type)
    regular_str_for_storage = '|'.join(
        f'^{item}$' for item in supported_storages)
    general_str_for_storage = ', '.join(supported_storages)

    # Reject duplicate job names in a single O(n) pass (was an O(n^2)
    # pairwise scan).
    seen_job_names = set()
    for job_data in parsed_str['jobs']:
        name = job_data['job']
        if name in seen_job_names:
            general_function.print_info(
                f"Duplicate job name '{name}'. You must use a unique name for the job's name."
            )
            sys.exit(1)
        seen_job_names.add(name)

    db_job_dict = {}
    file_job_dict = {}
    external_job_dict = {}

    # Classify each job by its backup type; unknown types are skipped
    # with a warning (best-effort, matches existing behavior).
    for job_data in parsed_str['jobs']:
        backup_type = job_data['type']
        job_name = job_data['job']
        if backup_type in supported_db_backup_type:
            db_job_dict[job_name] = job_data
        elif backup_type in supported_file_backup_type:
            file_job_dict[job_name] = job_data
        elif backup_type in supported_external_backup_type:
            external_job_dict[job_name] = job_data
        else:
            general_function.print_info(
                f"Backup type '{backup_type}' in job '{job_name}' does not supported, so this job was ignored! "
                f"Only one of this type backup is allowed:{supported_backup_type}!"
            )

    all_jobs_name = (list(db_job_dict.keys()) + list(file_job_dict.keys()) +
                     list(external_job_dict.keys()) + supported_general_job)
    general_str = ', '.join(all_jobs_name)
    regular_str = '|'.join(f'^{item}$' for item in all_jobs_name)

    # Fall back to the default log path when 'log_file' is absent or empty.
    log_file = parsed_str['main'].get('log_file', None)
    if not log_file:
        log_file = '/var/log/nxs-backup/nxs-backup.log'

    admin_mail = parsed_str['main']['admin_mail']
    if not admin_mail:
        general_function.print_info(
            "Field 'admin_mail' in 'main' section can't be empty!")
        sys.exit(1)

    # client_mail is a module-level list; extend it in place.
    client_mail.extend(parsed_str['main'].get('client_mail', []))

    level_message = parsed_str['main']['level_message']
    mail_from = parsed_str['main']['mail_from']
    server_name = parsed_str['main']['server_name']

    block_io_write = parsed_str['main'].get('block_io_write', None)
    block_io_read = parsed_str['main'].get('block_io_read', None)
    # NB: the config key is 'blkio_weight', not 'block_io_weight'.
    block_io_weight = parsed_str['main'].get('blkio_weight', None)
    general_path_to_all_tmp_dir = parsed_str['main'].get(
        'general_path_to_all_tmp_dir', None)
    cpu_shares = parsed_str['main'].get('cpu_shares', None)

    smtp_port = parsed_str['main'].get('smtp_port', None)
    smtp_ssl = parsed_str['main'].get('smtp_ssl', None)
    smtp_server = parsed_str['main'].get('smtp_server', None)
    smtp_user = parsed_str['main'].get('smtp_user', None)
    smtp_password = parsed_str['main'].get('smtp_password', None)
    smtp_timeout = parsed_str['main'].get('smtp_timeout', None)
    smtp_tls = parsed_str['main'].get('smtp_tls', None)

    loop_timeout = parsed_str['main'].get('loop_timeout', None)
    loop_interval = parsed_str['main'].get(
        'loop_interval', 30)  # 30 seconds - default loop interval

    return db_job_dict, file_job_dict, external_job_dict
def do_backup(path_to_config, jobs_name):
    """Parse the config, open the log and execute the requested backup(s).

    Args:
        path_to_config: path to the main configuration file.
        jobs_name: a concrete job name or one of the group pseudo-names
            'all', 'databases', 'files', 'external'.

    Exits with status 1 on a config parse error, an unknown job name, an
    unopenable log file, or when another instance is already running.
    """
    # Apply configured resource limits before doing any work.
    resource_constraint.set_limitations()

    try:
        parsed_string = specific_function.get_parsed_string(path_to_config)
    except general_function.MyError as e:
        general_function.print_info(
            "An error in the parse of the configuration file %s:%s!" %
            (path_to_config, e))
        sys.exit(1)

    (db_jobs_dict, file_jobs_dict,
     external_jobs_dict) = config.get_conf_value(parsed_string)

    general_function.create_files('', config.log_file)

    if jobs_name not in config.all_jobs_name:
        general_function.print_info(
            "Only one of this job's name is allowed: %s" %
            (config.general_str))
        sys.exit(1)

    # Open the log for appending; fall back to 'w' for targets where
    # append mode fails (e.g. /dev/stdout).
    try:
        config.filelog_fd = open(config.log_file, 'a')
    except OSError:  # e.g. /dev/stdout
        try:
            config.filelog_fd = open(config.log_file, 'w')
        except (OSError, PermissionError, FileNotFoundError) as e:
            message_info = "Couldn't open file %s:%s!" % (config.log_file, e)
            general_function.print_info(message_info)
            log_and_mail.send_report(message_info)
            sys.exit(1)
    except (PermissionError, FileNotFoundError) as e:
        message_info = "Couldn't open file %s:%s!" % (config.log_file, e)
        general_function.print_info(message_info)
        log_and_mail.send_report(message_info)
        sys.exit(1)

    # Single-instance guard.
    if general_function.is_running_script():
        log_and_mail.writelog('ERROR', "Script already is running!",
                              config.filelog_fd, '')
        config.filelog_fd.close()
        general_function.print_info("Script already is running!")
        sys.exit(1)

    log_and_mail.writelog('INFO', "Starting script.\n", config.filelog_fd)

    if jobs_name == 'all':
        # Run every configured job: files -> databases -> external.
        log_and_mail.writelog('INFO', "Starting files block backup.",
                              config.filelog_fd)
        for job_data in file_jobs_dict.values():
            execute_job(job_data['job'], job_data)
        log_and_mail.writelog('INFO', "Finishing files block backup.",
                              config.filelog_fd)

        log_and_mail.writelog('INFO', "Starting databases block backup.",
                              config.filelog_fd)
        for job_data in db_jobs_dict.values():
            execute_job(job_data['job'], job_data)
        log_and_mail.writelog('INFO', "Finishing databases block backup.\n",
                              config.filelog_fd)

        log_and_mail.writelog('INFO', "Starting external block backup.",
                              config.filelog_fd)
        for job_data in external_jobs_dict.values():
            execute_job(job_data['job'], job_data)
        log_and_mail.writelog('INFO', "Finishing external block backup.\n",
                              config.filelog_fd)
    elif jobs_name == 'databases':
        log_and_mail.writelog('INFO', "Starting databases block backup.",
                              config.filelog_fd)
        for job_data in db_jobs_dict.values():
            execute_job(job_data['job'], job_data)
        log_and_mail.writelog('INFO', "Finishing databases block backup.\n",
                              config.filelog_fd)
    elif jobs_name == 'files':
        log_and_mail.writelog('INFO', "Starting files block backup.",
                              config.filelog_fd)
        for job_data in file_jobs_dict.values():
            execute_job(job_data['job'], job_data)
        log_and_mail.writelog('INFO', "Finishing files block backup.\n",
                              config.filelog_fd)
    elif jobs_name == 'external':
        log_and_mail.writelog('INFO', "Starting external block backup.",
                              config.filelog_fd)
        for job_data in external_jobs_dict.values():
            execute_job(job_data['job'], job_data)
        log_and_mail.writelog('INFO', "Finishing external block backup.\n",
                              config.filelog_fd)
    else:
        # A single named job: dispatch to whichever dict holds it.
        # BUGFIX: the file-dict test must be 'elif' — with the previous
        # plain 'if', a database job name fell through to the else branch
        # and raised KeyError on external_jobs_dict[jobs_name].
        if jobs_name in db_jobs_dict:
            log_and_mail.writelog('INFO', "Starting databases block backup.",
                                  config.filelog_fd)
            execute_job(jobs_name, db_jobs_dict[jobs_name])
            log_and_mail.writelog('INFO',
                                  "Finishing databases block backup.\n",
                                  config.filelog_fd)
        elif jobs_name in file_jobs_dict:
            log_and_mail.writelog('INFO', "Starting files block backup.",
                                  config.filelog_fd)
            execute_job(jobs_name, file_jobs_dict[jobs_name])
            log_and_mail.writelog('INFO', "Finishing files block backup.\n",
                                  config.filelog_fd)
        else:
            log_and_mail.writelog('INFO', "Starting external block backup.",
                                  config.filelog_fd)
            execute_job(jobs_name, external_jobs_dict[jobs_name])
            log_and_mail.writelog('INFO',
                                  "Finishing external block backup.\n",
                                  config.filelog_fd)

    log_and_mail.writelog('INFO', "Stopping script.", config.filelog_fd)
    log_and_mail.send_report()
    config.filelog_fd.close()
# NOTE(review): this chunk duplicates the module import block and an older
# revision of do_backup(); kept byte-identical, comments only.
import resource_constraint
import mysql_backup
import mysql_xtradb_backup
import postgresql_backup
import postgresql_hot_backup
import mongodb_backup
import redis_backup
import desc_files_backup
import inc_files_backup
import external_backup
import generate_config

# Resolve the script version from version.py; fall back to 'unknown' when
# the module cannot be imported at all.
try:
    import version
except ImportError as err:
    general_function.print_info("Can't get version from file version.py: %s" % (err))
    VERSION = 'unknown'
else:
    VERSION = ''


def do_backup(path_to_config, jobs_name):
    # Apply configured resource limits before doing any work.
    resource_constraint.set_limitations()
    # Parse the configuration; a parse failure is fatal.
    try:
        parsed_string = specific_function.get_parsed_string(path_to_config)
    except general_function.MyError as e:
        general_function.print_info(
            "An error in the parse of the configuration file %s:%s!" %
            (path_to_config, e))
        sys.exit(1)
    # NOTE(review): this revision of do_backup appears truncated here in
    # this chunk; the complete versions appear elsewhere in the file.
# NOTE(review): another duplicated import block and partial revision of
# do_backup(); kept byte-identical, comments only. Module names differ
# from the other chunk (mysql_xtrabackup / postgresql_basebackup).
import resource_constraint
import mysql_backup
import mysql_xtrabackup
import postgresql_backup
import postgresql_basebackup
import mongodb_backup
import redis_backup
import desc_files_backup
import inc_files_backup
import external_backup
import generate_config

# Resolve the script version from version.py; fall back to 'unknown' when
# the module cannot be imported at all.
try:
    import version
except ImportError as err:
    general_function.print_info(
        f"Can't get version from file version.py: {err}")
    VERSION = 'unknown'
else:
    VERSION = ''


def do_backup(path_to_config, jobs_name):
    # Parse the configuration; a parse failure is fatal.
    try:
        parsed_string = specific_function.get_parsed_string(path_to_config)
    except general_function.MyError as e:
        general_function.print_info(
            f"An error in the parse of the configuration file {path_to_config}:{e}!"
        )
        sys.exit(1)
    # NOTE(review): this revision of do_backup appears truncated here in
    # this chunk; the complete versions appear elsewhere in the file.
def do_backup(path_to_config, jobs_name):
    """Parse the config file and run the requested backup job(s).

    jobs_name is either a concrete job name or one of the group
    pseudo-names 'all', 'databases', 'files', 'external'.
    Exits with status 1 on a parse error, unknown job name, unopenable
    log file, or when the instance lock cannot be acquired.
    """
    # Parse the configuration; a parse failure is fatal.
    try:
        parsed_string = specific_function.get_parsed_string(path_to_config)
    except general_function.MyError as e:
        general_function.print_info(
            f"An error in the parse of the configuration file {path_to_config}:{e}!"
        )
        sys.exit(1)

    # Split configured jobs by kind and set the module-level globals.
    db_jobs_dict, file_jobs_dict, external_jobs_dict = config.get_conf_value(
        parsed_string)

    # Apply configured resource limits before doing any work.
    resource_constraint.set_limitations()
    general_function.create_files('', config.log_file)

    if jobs_name not in config.all_jobs_name:
        general_function.print_info(
            f"Only one of this job's name is allowed: {config.general_str}")
        sys.exit(1)

    # Open the log for appending; fall back to 'w' for targets where
    # append mode fails.
    try:
        config.filelog_fd = open(config.log_file, 'a')
    except OSError:  # e.g. /dev/stdout
        try:
            config.filelog_fd = open(config.log_file, 'w')
        except (OSError, PermissionError, FileNotFoundError) as e:
            messange_info = f"Couldn't open file {config.log_file}:{e}!"
            general_function.print_info(messange_info)
            log_and_mail.send_report(messange_info)
            sys.exit(1)
    except (PermissionError, FileNotFoundError) as e:
        messange_info = f"Couldn't open file {config.log_file}:{e}!"
        general_function.print_info(messange_info)
        log_and_mail.send_report(messange_info)
        sys.exit(1)

    # Single-instance guard: get_lock() raises MyError when another
    # instance already holds the lock.
    try:
        general_function.get_lock()
    except general_function.MyError as ex:
        msg = ex.message
        log_and_mail.writelog('ERROR', f"{msg}", config.filelog_fd, '')
        general_function.print_info(f"{msg}")
        sys.exit(1)

    log_and_mail.writelog('INFO', "Starting script.\n", config.filelog_fd)

    if jobs_name == 'all':
        # Run every configured job: files -> databases -> external.
        log_and_mail.writelog('INFO', "Starting files block backup.",
                              config.filelog_fd)
        for i in list(file_jobs_dict.keys()):
            current_jobs_name = file_jobs_dict[i]['job']
            execute_job(current_jobs_name, file_jobs_dict[i])
        log_and_mail.writelog('INFO', "Finishing files block backup.",
                              config.filelog_fd)
        log_and_mail.writelog('INFO', "Starting databases block backup.",
                              config.filelog_fd)
        for i in list(db_jobs_dict.keys()):
            current_jobs_name = db_jobs_dict[i]['job']
            execute_job(current_jobs_name, db_jobs_dict[i])
        log_and_mail.writelog('INFO', "Finishing databases block backup.\n",
                              config.filelog_fd)
        log_and_mail.writelog('INFO', "Starting external block backup.",
                              config.filelog_fd)
        for i in list(external_jobs_dict.keys()):
            current_jobs_name = external_jobs_dict[i]['job']
            execute_job(current_jobs_name, external_jobs_dict[i])
        log_and_mail.writelog('INFO', "Finishing external block backup.\n",
                              config.filelog_fd)
    elif jobs_name == 'databases':
        log_and_mail.writelog('INFO', "Starting databases block backup.",
                              config.filelog_fd)
        for i in list(db_jobs_dict.keys()):
            current_jobs_name = db_jobs_dict[i]['job']
            execute_job(current_jobs_name, db_jobs_dict[i])
        log_and_mail.writelog('INFO', "Finishing databases block backup.\n",
                              config.filelog_fd)
    elif jobs_name == 'files':
        log_and_mail.writelog('INFO', "Starting files block backup.",
                              config.filelog_fd)
        for i in list(file_jobs_dict.keys()):
            current_jobs_name = file_jobs_dict[i]['job']
            execute_job(current_jobs_name, file_jobs_dict[i])
        log_and_mail.writelog('INFO', "Finishing files block backup.\n",
                              config.filelog_fd)
    elif jobs_name == 'external':
        log_and_mail.writelog('INFO', "Starting external block backup.",
                              config.filelog_fd)
        for i in list(external_jobs_dict.keys()):
            current_jobs_name = external_jobs_dict[i]['job']
            execute_job(current_jobs_name, external_jobs_dict[i])
        log_and_mail.writelog('INFO', "Finishing external block backup.\n",
                              config.filelog_fd)
    else:
        # A single named job: dispatch to whichever dict holds it.
        if jobs_name in list(db_jobs_dict.keys()):
            log_and_mail.writelog('INFO', "Starting databases block backup.",
                                  config.filelog_fd)
            execute_job(jobs_name, db_jobs_dict[jobs_name])
            log_and_mail.writelog('INFO',
                                  "Finishing databases block backup.\n",
                                  config.filelog_fd)
        elif jobs_name in list(file_jobs_dict.keys()):
            log_and_mail.writelog('INFO', "Starting files block backup.",
                                  config.filelog_fd)
            execute_job(jobs_name, file_jobs_dict[jobs_name])
            log_and_mail.writelog('INFO', "Finishing files block backup.\n",
                                  config.filelog_fd)
        else:
            log_and_mail.writelog('INFO', "Starting external block backup.",
                                  config.filelog_fd)
            execute_job(jobs_name, external_jobs_dict[jobs_name])
            log_and_mail.writelog('INFO',
                                  "Finishing external block backup.\n",
                                  config.filelog_fd)

    log_and_mail.writelog('INFO', "Stopping script.", config.filelog_fd)
    # NOTE(review): unlike the older revision in this file, this version
    # does not close config.filelog_fd or call log_and_mail.send_report()
    # here — presumably handled elsewhere; verify against the caller.