def deploy_configuration(computer, user):
    """
    Deploy configuration on remote machine
    (run "ssh-copy-id -i pub_file -f <user>@<computer>")

    :param computer: remote computer than deploy RSA key
    :param user: remote user on computer
    """
    import subprocess  # local import: top-of-file import block not visible here
    # Build the path of the public key inside the user's home
    home = os.path.expanduser('~')
    ssh_folder = os.path.join(home, '.ssh')
    id_rsa_pub_file = os.path.join(ssh_folder, 'id_rsa.pub')
    print_verbose(args.verbose, 'Public id_rsa is {0}'.format(id_rsa_pub_file))
    if os.path.exists(id_rsa_pub_file):
        print('Copying configuration to' + utility.PrintColor.BOLD +
              ' {0}'.format(computer) + utility.PrintColor.END +
              '; write the password:')
        # NOTE(review): this statement was corrupted in the source ("******");
        # reconstructed from the docstring ("run ssh-copy-id -i pub_file ...")
        # and the return-code handling below — confirm against history.
        return_code = subprocess.call(
            'ssh-copy-id -i {0} {1}@{2}'.format(id_rsa_pub_file, user,
                                                computer),
            shell=True)
        print_verbose(args.verbose,
                      'Return code of ssh-copy-id: {0}'.format(return_code))
        if return_code == 0:
            print(utility.PrintColor.GREEN +
                  "SUCCESS: Configuration copied successfully on {0}!".format(
                      computer) + utility.PrintColor.END)
        else:
            print(utility.PrintColor.RED +
                  "ERROR: Configuration has not been copied successfully on {0}!"
                  .format(computer) + utility.PrintColor.END)
    else:
        # No public key on disk: nothing to deploy, abort with code 2
        print(utility.PrintColor.YELLOW +
              "WARNING: Public key ~/.ssh/id_rsa.pub is not exist" +
              utility.PrintColor.END)
        exit(2)
def remove_configuration():
    """
    Remove a new configuration (remove an exist RSA key pair)
    """
    # The key pair lives under ~/.ssh
    ssh_folder = os.path.join(os.path.expanduser('~'), '.ssh')
    if utility.confirm('Are you sure to remove existing rsa keys?'):
        # Private key first: a missing file aborts with exit code 2
        id_rsa_file = os.path.join(ssh_folder, 'id_rsa')
        print_verbose(args.verbose,
                      'Remove private id_rsa {0}'.format(id_rsa_file))
        if not os.path.exists(id_rsa_file):
            print(utility.PrintColor.YELLOW +
                  "WARNING: Private key ~/.ssh/id_rsa is not exist" +
                  utility.PrintColor.END)
            exit(2)
        os.remove(id_rsa_file)
        # Then the public key, with the same missing-file policy
        id_rsa_pub_file = os.path.join(ssh_folder, 'id_rsa.pub')
        print_verbose(args.verbose,
                      'Remove public id_rsa {0}'.format(id_rsa_pub_file))
        if not os.path.exists(id_rsa_pub_file):
            print(utility.PrintColor.YELLOW +
                  "WARNING: Public key ~/.ssh/id_rsa.pub is not exist" +
                  utility.PrintColor.END)
            exit(2)
        os.remove(id_rsa_pub_file)
        print(utility.PrintColor.GREEN +
              "SUCCESS: Removed configuration successfully!" +
              utility.PrintColor.END)
def get_last_full(catalog):
    """
    Get the last full
    :param catalog: configparser object
    :return: path (string)
    """
    config = read_catalog(catalog)
    if not config:
        # No readable catalog at all
        return False
    # Timestamps of every Full backup recorded for this host
    dates = [utility.string_to_time(config.get(bid, 'timestamp'))
             for bid in config.sections()
             if config.get(bid, 'type') == 'Full'
             and config.get(bid, 'name') == hostname]
    if dates:
        last_full = utility.time_to_string(max(dates))
        if last_full:
            print_verbose(args.verbose, 'Last full is {0}'.format(last_full))
            # Return the path of the section carrying the newest timestamp
            for bid in config.sections():
                if (config.get(bid, 'type') == 'Full'
                        and config.get(bid, 'name') == hostname
                        and config.get(bid, 'timestamp') == last_full):
                    return config.get(bid, 'path')
    # Implicit None when the catalog exists but holds no matching Full backup
def compose_command(flags, host):
    """
    Compose rsync command for action
    :param flags: Dictionary than contains info for command
    :param host: Hostname of machine
    :return: list
    """
    # NOTE(review): despite the docstring, `flags` is accessed with attribute
    # syntax (flags.action, flags.mode, ...) — presumably an argparse
    # Namespace, not a dict; confirm against the caller.
    print_verbose(args.verbose, 'Build a rsync command')
    # Set rsync binary
    command = ['rsync']
    if flags.action == 'backup':
        # Set mode option
        if flags.mode == 'Full':
            command.append('-ah')
            command.append('--no-links')
            # Write catalog file
            write_catalog(catalog_path, backup_id, 'type', 'Full')
        elif flags.mode == 'Incremental':
            last_full = get_last_full(catalog_path)
            if last_full:
                command.append('-ahu')
                command.append('--no-links')
                # Hard-link unchanged files against the last full backup
                command.append('--link-dest={0}'.format(last_full))
                # Write catalog file
                write_catalog(catalog_path, backup_id, 'type', 'Incremental')
            else:
                # No previous Full backup found: run as a Full instead
                command.append('-ah')
                command.append('--no-links')
                # Write catalog file
                write_catalog(catalog_path, backup_id, 'type', 'Full')
        elif flags.mode == 'Mirror':
            command.append('-ah')
            command.append('--delete')
            # Write catalog file
            write_catalog(catalog_path, backup_id, 'type', 'Mirror')
        # Set verbosity
        if flags.verbose:
            command.append('-vP')
        # Set compress mode
        if flags.compress:
            command.append('-z')
        if flags.log:
            # Per-backup rsync log inside the destination folder
            log_path = os.path.join(
                compose_destination(host, flags.destination), 'backup.log')
            command.append('--log-file={0}'.format(log_path))
            utility.write_log(log_args['status'], log_args['destination'],
                              'INFO',
                              'rsync log path: {0}'.format(log_path))
    elif flags.action == 'restore':
        command.append('-ahu')
        if flags.verbose:
            command.append('-vP')
        if flags.log:
            # NOTE(review): `rpath` is not defined in this block — presumably
            # a module-level restore path set before this is reached; confirm.
            log_path = os.path.join(rpath, 'restore.log')
            command.append('--log-file={0}'.format(log_path))
            utility.write_log(log_args['status'], log_args['destination'],
                              'INFO',
                              'rsync log path: {0}'.format(log_path))
    print_verbose(args.verbose,
                  'Command flags are: {0}'.format(' '.join(command)))
    return command
def print_version(version):
    """
    Print version of Butterfly Backup
    :return: str
    """
    print_verbose(args.verbose, 'Print version and logo')
    # Logo only in verbose mode
    if args.verbose:
        print_logo()
    version_line = (utility.PrintColor.BOLD + 'Version: ' +
                    utility.PrintColor.END + version)
    print(version_line)
    exit()
def compose_source(action, os_name, sources):
    """
    Compose source
    :param action: command action (backup, restore, archive)
    :param os_name: Name of operating system
    :param sources: Dictionary or string than contains the paths of source
    :return: list
    """
    if action != 'backup':
        # Only the backup action builds a source list
        return
    src_list = []
    folders = map_dict_folder(os_name)
    # Record the OS of this backup id in the catalog
    write_catalog(catalog_path, backup_id, 'os', os_name)
    # 'System' covers everything: short-circuit without logging
    if 'System' in sources:
        src_list.append(':{0}'.format(folders['System']))
        return src_list
    custom = True
    for category in ('User', 'Config', 'Application', 'Log'):
        if category in sources:
            src_list.append(':{0}'.format(folders[category]))
            custom = False
    if custom:
        # No predefined category matched: entries are custom paths,
        # single-quoted (with embedded quotes escaped) for the remote shell
        for custom_data in sources:
            src_list.append(
                ':{0}'.format("'" + custom_data.replace("'", "'\\''") + "'"))
    utility.write_log(
        log_args['status'], log_args['destination'], 'INFO',
        'OS {0}; backup folder {1}'.format(os_name, ' '.join(src_list)))
    print_verbose(args.verbose,
                  'Include this criteria: {0}'.format(' '.join(src_list)))
    return src_list
def read_catalog(catalog):
    """
    Read a catalog file
    :param catalog: catalog file
    :return: catalog file (configparser)
    """
    config = configparser.ConfigParser()
    # configparser.read returns the list of files it parsed; empty => missing
    if config.read(catalog):
        return config
    print_verbose(args.verbose, 'Catalog not found! Create a new one.')
    catalog_dir = os.path.dirname(catalog)
    if not os.path.exists(catalog_dir):
        # Cannot create the catalog if its parent folder is absent
        print(utility.PrintColor.RED +
              'ERROR: Folder {0} not exist!'.format(catalog_dir) +
              utility.PrintColor.END)
        exit(1)
    # Create an empty catalog file and re-read it
    utility.touch(catalog)
    config.read(catalog)
    return config
def compose_destination(computer_name, folder):
    """
    Compose folder destination of backup
    :param computer_name: name of source computer
    :param folder: path of backup
    :return: string
    """
    # Layout: <folder>/<computer_name>/<timestamp>
    first_layer = os.path.join(folder, computer_name)
    second_layer = os.path.join(first_layer, utility.time_for_folder())
    for layer in (first_layer, second_layer):
        if not os.path.exists(layer):
            os.mkdir(layer)
            utility.write_log(log_args['status'], log_args['destination'],
                              'INFO', 'Create folder {0}'.format(layer))
    # Record the backup path in the catalog
    write_catalog(catalog_path, backup_id, 'path', second_layer)
    print_verbose(args.verbose, 'Destination is {0}'.format(second_layer))
    return second_layer
def archive_policy(catalog, destination):
    """
    Archive policy
    :param catalog: catalog file
    :param destination: destination path of archive file
    """
    config = read_catalog(catalog)
    for bid in config.sections():
        full_count = count_full(config, config.get(bid, 'name'))
        # Hoisted out of the branch: the else below logs `path`, which was
        # unbound (NameError) when the first section was already archived.
        path = config.get(bid, 'path')
        if (config.get(bid, 'archived', fallback='unset') == 'unset') and not \
                (config.get(bid, 'cleaned', fallback=False)):
            type_backup = config.get(bid, 'type')
            date = config.get(bid, 'timestamp')
            logpath = os.path.join(os.path.dirname(path), 'general.log')
            utility.print_verbose(
                args.verbose,
                "Check archive this backup {0}. Folder {1}".format(bid, path))
            # Never archive the only remaining Full backup of a host
            if (type_backup == 'Full') and (full_count <= 1):
                continue
            archive = utility.archive(path, date, args.days, destination)
            if archive == 0:
                write_catalog(catalog, bid, 'archived', 'True')
                print(utility.PrintColor.GREEN +
                      'SUCCESS: Archive {0} successfully.'.format(path) +
                      utility.PrintColor.END)
                utility.write_log(log_args['status'], logpath, 'INFO',
                                  'Archive {0} successfully.'.format(path))
            elif archive == 1:
                print(utility.PrintColor.RED +
                      'ERROR: Archive {0} failed.'.format(path) +
                      utility.PrintColor.END)
                utility.write_log(log_args['status'], logpath, 'ERROR',
                                  'Archive {0} failed.'.format(path))
        else:
            utility.print_verbose(
                args.verbose,
                "No archive backup {0}. Folder {1}".format(bid, path))
def retention_policy(host, catalog, logpath):
    """
    Retention policy
    :param host: hostname of machine
    :param catalog: catalog file
    :param logpath: path of log file
    """
    config = read_catalog(catalog)
    full_count = count_full(config, host)
    for bid in config.sections():
        # Hoisted out of the branch: the else below logs `path`, which was
        # unbound (NameError) when the first section was already cleaned or
        # belonged to another host.
        path = config.get(bid, 'path')
        if (config.get(bid, 'cleaned', fallback='unset') == 'unset') and \
                (config.get(bid, 'name') == host):
            type_backup = config.get(bid, 'type')
            date = config.get(bid, 'timestamp')
            utility.print_verbose(
                args.verbose,
                "Check cleanup this backup {0}. Folder {1}".format(bid, path))
            # Never clean up the only remaining Full backup of the host
            if (type_backup == 'Full') and (full_count <= 1):
                continue
            cleanup = utility.cleanup(path, date, args.retention)
            if cleanup == 0:
                write_catalog(catalog, bid, 'cleaned', 'True')
                print(utility.PrintColor.GREEN +
                      'SUCCESS: Cleanup {0} successfully.'.format(path) +
                      utility.PrintColor.END)
                utility.write_log(log_args['status'], logpath, 'INFO',
                                  'Cleanup {0} successfully.'.format(path))
            elif cleanup == 1:
                print(utility.PrintColor.RED +
                      'ERROR: Cleanup {0} failed.'.format(path) +
                      utility.PrintColor.END)
                utility.write_log(log_args['status'], logpath, 'ERROR',
                                  'Cleanup {0} failed.'.format(path))
        else:
            utility.print_verbose(
                args.verbose,
                "No cleanup backup {0}. Folder {1}".format(bid, path))
def run_in_parallel(fn, commands, limit):
    """
    Run in parallel with limit
    :param fn: function in parallelism
    :param commands: args commands of function
    :param limit: number of parallel process
    """
    # Start a Pool with "limit" processes
    pool = Pool(processes=limit)
    jobs = []
    # NOTE(review): `logs` is a module-level list of per-host dicts (keys
    # used here: 'hostname', 'destination', 'id') — assumed aligned 1:1
    # with `commands`; confirm against the caller.
    for command, plog in zip(commands, logs):
        # Run the function
        proc = pool.apply_async(func=fn, args=(command, ))
        jobs.append(proc)
        print('Start {0} on {1}'.format(args.action, plog['hostname']))
        print_verbose(args.verbose, "rsync command: {0}".format(command))
        utility.write_log(
            log_args['status'], plog['destination'], 'INFO',
            'Start process {0} on {1}'.format(args.action, plog['hostname']))
        if args.action == 'backup':
            # Write catalog file
            write_catalog(catalog_path, plog['id'], 'start',
                          utility.time_for_log())
    # Wait for jobs to complete before exiting
    while not all([p.ready() for p in jobs]):
        time.sleep(5)
    # Check exit code of command
    for p, command, plog in zip(jobs, commands, logs):
        if p.get() != 0:
            # Non-zero rsync exit: report and record the failure
            print(utility.PrintColor.RED +
                  'ERROR: Command {0} exit with code: {1}'.format(
                      command, p.get()) + utility.PrintColor.END)
            utility.write_log(
                log_args['status'], plog['destination'], 'ERROR',
                'Finish process {0} on {1} with error:{2}'.format(
                    args.action, plog['hostname'], p.get()))
            if args.action == 'backup':
                # Write catalog file
                write_catalog(catalog_path, plog['id'], 'end',
                              utility.time_for_log())
                write_catalog(catalog_path, plog['id'], 'status',
                              "{0}".format(p.get()))
        else:
            print(utility.PrintColor.GREEN +
                  'SUCCESS: Command {0}'.format(command) +
                  utility.PrintColor.END)
            utility.write_log(
                log_args['status'], plog['destination'], 'INFO',
                'Finish process {0} on {1}'.format(args.action,
                                                   plog['hostname']))
            if args.action == 'backup':
                # Write catalog file
                write_catalog(catalog_path, plog['id'], 'end',
                              utility.time_for_log())
                write_catalog(catalog_path, plog['id'], 'status',
                              "{0}".format(p.get()))
                if args.retention:
                    # Retention policy runs only after a successful backup
                    retention_policy(plog['hostname'], catalog_path,
                                     plog['destination'])
    # Safely terminate the pool
    pool.close()
    pool.join()
def new_configuration():
    """
    Create a new configuration (create a RSA key pair)
    """
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.hazmat.backends import default_backend
    # Generate private/public key pair
    print_verbose(args.verbose, 'Generate private/public key pair')
    private_key = rsa.generate_private_key(backend=default_backend(),
                                           public_exponent=65537,
                                           key_size=2048)
    # Get public key in OpenSSH format
    print_verbose(args.verbose, 'Get public key in OpenSSH format')
    public_key = private_key.public_key().public_bytes(
        serialization.Encoding.OpenSSH, serialization.PublicFormat.OpenSSH)
    # Get private key in PEM container format
    print_verbose(args.verbose, 'Get private key in PEM container format')
    pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption())
    # Decode to printable strings
    private_key_str = pem.decode('utf-8')
    public_key_str = public_key.decode('utf-8')
    # Create home path
    home = os.path.expanduser('~')
    # Create folder .ssh
    ssh_folder = os.path.join(home, '.ssh')
    print_verbose(args.verbose, 'Create folder {0}'.format(ssh_folder))
    if not os.path.exists(ssh_folder):
        os.mkdir(ssh_folder, mode=0o755)
    # Create private key file; refuse to overwrite an existing key
    id_rsa_file = os.path.join(ssh_folder, 'id_rsa')
    print_verbose(args.verbose,
                  'Create private key file {0}'.format(id_rsa_file))
    if not os.path.exists(id_rsa_file):
        with open(id_rsa_file, 'w') as id_rsa:
            # chmod before writing so the key is never world-readable
            os.chmod(id_rsa_file, mode=0o600)
            id_rsa.write(private_key_str)
    else:
        print(utility.PrintColor.YELLOW +
              "WARNING: Private key ~/.ssh/id_rsa exists" +
              utility.PrintColor.END)
        print(
            'If you want to use the existing key, run "bb config --deploy name_of_machine", otherwise to remove it, '
            'run "bb config --remove"')
        exit(2)
    # Create public key file; same no-overwrite policy as the private key
    id_rsa_pub_file = os.path.join(ssh_folder, 'id_rsa.pub')
    print_verbose(args.verbose,
                  'Create public key file {0}'.format(id_rsa_pub_file))
    if not os.path.exists(id_rsa_pub_file):
        with open(id_rsa_pub_file, 'w') as id_rsa_pub:
            os.chmod(id_rsa_pub_file, mode=0o644)
            id_rsa_pub.write(public_key_str)
    else:
        print(utility.PrintColor.YELLOW +
              "WARNING: Public key ~/.ssh/id_rsa.pub exists" +
              utility.PrintColor.END)
        print(
            'If you want to use the existing key, run "bb config --deploy name_of_machine", otherwise to remove it, '
            'run "bb config --remove"')
        exit(2)
    print(utility.PrintColor.GREEN +
          "SUCCESS: New configuration successfully created!" +
          utility.PrintColor.END)
# NOTE(review): top-level fragment — appears to belong to the backup-action
# flow of the main script; `backup_id`, `hostname`, `args` and `logs` are
# defined elsewhere in the file. Confirm its original placement/indentation.
log_args = {
    'id': backup_id,
    'hostname': hostname,
    'status': args.log,
    'destination': os.path.join(args.destination, hostname, 'general.log')
}
logs.append(log_args)
catalog_path = os.path.join(args.destination, '.catalog.cfg')
# Compose command
cmd = compose_command(args, hostname)
print_verbose(
    args.verbose,
    'Create a folder structure for {0} os'.format(args.type))
# Write catalog file
write_catalog(catalog_path, backup_id, 'name', hostname)
# Compose source: explicit data categories take precedence over custom paths
if args.data:
    srcs = args.data
    source_list = compose_source(args.action, args.type, srcs)
elif args.customdata:
    srcs = args.customdata
    source_list = compose_source(args.action, args.type, srcs)
else:
    source_list = []
# Compose source <user>@<hostname> format: the joined ':path' entries are
# concatenated onto the user@host string (rsync remote-source syntax)
cmd.append('{0}@{1}'.format(args.user, hostname).__add__(
    " ".join(source_list)))