def create_base_dir(base_dir):
    """Interactively move or copy the existing /srv software server
    directories into {base_dir}/wmla120-{arch}/.

    The architecture (ppc64le or x86_64) is inferred from the directory
    names under /srv/repos/dependencies/rhel7/. For a copy, roughly 18 GB
    of free space is required on the destination filesystem.

    Args:
        base_dir (str): Destination base directory.
    """
    log = logger.getlogger()
    print('\nMove or Copy the existing software server directories?')
    ch, action = get_selection('move\ncopy', ('m', 'c'))

    if action == 'copy':
        # A move stays on the same filesystem; only a copy needs free space.
        statvfs = os.statvfs(base_dir)
        freespace = statvfs.f_frsize * statvfs.f_bavail
        if freespace < 18000000000:
            sys.exit('Insufficient space on disk')

    # Infer architecture from dependency directory names ('p8'/'p9' imply
    # POWER systems, hence ppc64le).
    arch = ''
    exists = glob('/srv/repos/dependencies/rhel7/*')
    if not exists:
        log.error('\nNo dependencies folder found. Unable to perform move.\n')
        sys.exit()
    for path in exists:
        if 'p8' in path or 'p9' in path:
            arch = 'ppc64le'
            break
        elif 'x86_64' in path:
            arch = 'x86_64'
            break
    if not arch:
        log.error('\nUnable to determine architecture. Unable to perform '
                  'move.\n')
        sys.exit()

    if os.path.exists(f'{base_dir}/wmla120-{arch}'):
        print(f'Destination path {base_dir}/wmla120-{arch} already exists.')
        if action == 'copy':
            if not get_yesno('Okay to proceed with force copy? '):
                sys.exit('Exit at user request')
    else:
        os.mkdir(f'{base_dir}/wmla120-{arch}/')

    for _dir in ('repos', 'anaconda', 'spectrum-conductor', 'spectrum-dli',
                 'wmla-license'):
        path = os.path.join('/srv/', _dir, '')
        if os.path.isdir(path):
            print(f'Found dir: {path}')
            if action == 'move':
                try:
                    _dir = f'{base_dir}/wmla120-{arch}/'
                    move(path, _dir)
                    # Restore the SELinux type so httpd can serve the files.
                    cmd = f'sudo chcon -Rv --type=httpd_sys_content_t {_dir}'
                    _, err, rc = sub_proc_exec(cmd)
                    if rc != 0:
                        log.error(f'chtype of directory {_dir} failed {err}')
                except shutil_Error as exc:
                    print(exc)
            elif action == 'copy':
                cmd = f'cp -rf {path} {base_dir}/wmla120-{arch}/'
                try:
                    _, err, rc = sub_proc_exec(cmd)
                except Exception as exc:
                    # Fix: the original bare 'except: pass' could leave rc
                    # unbound below. Record the failure instead.
                    err = str(exc)
                    rc = -1
                if rc != 0:
                    # Fix: original string lacked the f-prefix, so the
                    # literal text '{err}' was logged.
                    log.error(f'Copy error {err}')
        else:
            log.error(f'Path {path} missing')
    print('Done')
def get_repo_url(self, url, alt_url=None, name='', contains=None,
                 excludes=None, filelist=None):
    """Allows the user to choose the default url or enter an alternate

    Inputs:
        url: (str) URL or metalink for the default external repo source
        alt_url: (str) An alternate url that the user can modify
        name: (str) Name of the repo. Used for prompting only.
        contains: (list of strings) Filter criteria. The presented list
            is restricted to those urls that contain elements from
            'contains' and no elements of 'excludes'.
        excludes: (list of strings)
        filelist: (list of strings) Can be globs. Used to validate a
            repo. The specified files must be present

    Returns:
        (str or None) The chosen url with a trailing '/', or None if the
        user declines to select one.
    """
    # Fix: mutable default arguments ([]) are shared across calls; use
    # None sentinels and create fresh lists here instead.
    contains = [] if contains is None else contains
    excludes = [] if excludes is None else excludes
    filelist = [] if filelist is None else filelist

    if name:
        print(f'\nChoice for source of {name} repository:')
    if url:
        sel_txt = 'Public mirror.Alternate web site'
        sel_chcs = 'P.A'
    else:
        sel_txt = 'Alternate web site'
        sel_chcs = 'A'
    _url = None
    while _url is None:
        ch, item = get_selection(sel_txt, sel_chcs, 'Choice: ', '.',
                                 allow_none=True)
        if ch == 'P':
            _url = url
            break
        if ch == 'A':
            if not alt_url:
                alt_url = f'http://host/repos/{self.repo_id}/'
            _url = get_url(alt_url, prompt_name=self.repo_name,
                           repo_chk=self.repo_type, contains=contains,
                           excludes=excludes, filelist=filelist)
            # Normalize to a trailing slash so the url can be used as a
            # directory prefix.
            if _url and _url[-1] != '/':
                _url = _url + '/'
            break
        elif ch == 'N':
            _url = None
            break
    return _url
def main(args):
    """Aggregate yum, conda and pip dependency lists captured pre- and
    post-install into per-repository package list (.yml) files.

    The pre/post text files live under a dependencies directory (chosen
    interactively, or the first one found when args are supplied). For
    each package type the merged 'pre' package set is subtracted from the
    merged 'post' set, giving the packages installed across all
    environments; those are grouped by source repository and written out
    as yaml files in the dependencies directory.

    Args:
        args (list): Command line arguments. When non-empty they are
            parsed with parse_input() and no interactive prompts occur.
    """
    log = logger.getlogger()
    log.debug('log this')
    user_input = len(args) > 0
    if user_input:
        args = parse_input(args)

    dep_base_path = gen.get_dependencies_path()
    dirs = [d for d in os.listdir(dep_base_path)
            if os.path.isdir(os.path.join(dep_base_path, d))]
    dirs = [os.path.join(dep_base_path, _dir) for _dir in dirs]

    dep_dir = ""
    if user_input:
        dep_dir = dirs[0]
    if not user_input:
        ch, dep_dir = get_selection(dirs,
                                    prompt='Select a directory to aggregate '
                                    'dependencies from: ')

    # Expected capture-file names, grouped by (pkg type, pre/post).
    dep_files = {}
    pip_pre_files = ['client_pip_pre_install.txt',
                     'dlipy3_pip_pre_install.txt',
                     'dlipy2_pip_pre_install.txt',
                     'dlinsights_pip_pre_install.txt']
    dep_files['pip_pre_files'] = pip_pre_files

    pip_post_files = ['client_pip_post_install.txt',
                      'dlipy3_pip_post_install.txt',
                      'dlipy2_pip_post_install.txt',
                      'dlinsights_pip_post_install.txt']
    dep_files['pip_post_files'] = pip_post_files

    conda_pre_files = ['dlipy3_conda_pre_install.txt',
                       'dlipy2_conda_pre_install.txt',
                       'dlinsights_conda_pre_install.txt']
    dep_files['conda_pre_files'] = conda_pre_files

    conda_post_files = ['dlipy3_conda_post_install.txt',
                        'dlipy2_conda_post_install.txt',
                        'dlinsights_conda_post_install.txt']
    dep_files['conda_post_files'] = conda_post_files

    yum_pre_files = ['client_yum_pre_install.txt']
    dep_files['yum_pre_files'] = yum_pre_files

    yum_post_files = ['client_yum_post_install.txt']
    dep_files['yum_post_files'] = yum_post_files

    # Locate the directory actually holding the capture files (they may be
    # nested below the selected directory).
    exists = glob.glob(f'{dep_dir}/**/{yum_pre_files[0]}', recursive=True)
    if exists:
        dep_dir = os.path.dirname(exists[0])
    else:
        log.error('No client yum pre file found')
        sys.exit()

    # Change file ownership to current user if not writable
    if not os.access(dep_dir, os.W_OK):
        username = getpass.getuser()
        cmd = f'sudo chown -R {username}:{username} {dep_dir}'
        sub_proc_exec(cmd, shell=True)

    # Clear comments ('#') and conda env headers ('@') from the files,
    # rewriting each file in place.
    for item in dep_files:
        for _file in dep_files[item]:
            _file_path = os.path.join(dep_dir, _file)
            with open(_file_path, 'r') as f:
                lines = f.read().splitlines()
            with open(_file_path, 'w') as f:
                for line in lines:
                    if line.startswith('#') or line.startswith('@'):
                        continue
                    f.write(line + '\n')

    def file_check(file_list):
        """Pause with an INFO message for each expected file that is
        missing from dep_dir."""
        for f in file_list:
            file_path = os.path.join(dep_dir, f)
            # Idiom fix: original was 'if my_file: pass else: ...'
            if not os.path.isfile(file_path):
                input(f'\nINFO - {f} Does not exist\n')

    def get_pkg_repo(pkg, pkg_type):
        """Derive the source repository name from one package list line."""
        if pkg_type == 'yum':
            pkg_items = pkg.split()
            repo = pkg_items[2]
            # Strip the '-powerup' suffix from local repo names.
            if repo.endswith('-powerup'):
                repo = repo[:-8]
        elif pkg_type == 'pip':
            pkg_items = pkg.split()
            repo = pkg_items[2]
            if pkg_type in repo:
                repo = 'pypi'
        elif pkg_type == 'conda':
            pkg_dir = pkg.rpartition('/')[0]
            if 'ibm-ai' in pkg or 'ibmai' in pkg:
                if 'linux-ppc64le' in pkg:
                    repo = 'ibmai_linux_ppc64le'
                elif 'noarch' in pkg:
                    repo = 'ibmai_noarch'
                elif 'x86_64' in pkg:
                    repo = 'ibmai_linux_x86_64'
                else:
                    repo = 'ibm_ai_unresolved_reponame'
            elif 'repo.anaconda' in pkg:
                # Repo name from the last two url path components,
                # e.g. .../main/linux-ppc64le -> anaconda_main_linux_ppc64le
                repo = '-'.join(pkg_dir.rsplit('/', 2)[-2:])
                repo = 'anaconda_' + repo.replace('-', '_')
            else:
                pkg_dir = pkg.rpartition('/')[0]
                repo = '_'.join(pkg_dir.rsplit('/', 2)[-2:])
        return repo

    def format_pkg_name(pkg, pkg_type):
        """Return (formatted package name, repo name) for one list line."""
        if pkg_type == 'yum':
            pkg_items = pkg.split()
            pkg_repo = get_pkg_repo(pkg, pkg_type)
            # 'name.arch version' -> 'name-version.arch'
            pkg_fmt_name = (pkg_items[0].rsplit('.', 1)[0] + '-' +
                            pkg_items[1] + '.' +
                            pkg_items[0].rsplit('.', 1)[1])
        elif pkg_type == 'conda':
            pkg_fmt_name = pkg.rpartition('/')[-1]
            pkg_repo = get_pkg_repo(pkg, pkg_type)
        elif pkg_type == 'pip':
            pkg_items = pkg.split()
            pkg_repo = get_pkg_repo(pkg, pkg_type)
            # 'name (version)' -> 'name==version'
            version = pkg_items[1].replace('(', '')
            version = version.replace(')', '')
            pkg_fmt_name = pkg_items[0] + '==' + version
        return pkg_fmt_name, pkg_repo

    def write_merged_files(merged_sets, pkg_type):
        """Write one sorted yaml file per repo; return metadata about the
        files written, keyed by pkg_type."""
        repo_list = {}
        if pkg_type == 'yum':
            repo_list[pkg_type] = []
            for repo in merged_sets:
                file_name = repo.replace('/', '')
                file_name = file_name.replace('@', '')
                file_name = f'{file_name}.yml'
                file_path = os.path.join(dep_dir, file_name)
                with open(file_path, 'w') as f:
                    d = {file_name: sorted(merged_sets[repo], key=str.lower)}
                    repo_list[pkg_type].append({"path": file_path,
                                                "filename": file_name,
                                                "hash": d})
                    yaml.dump(d, f, indent=4, default_flow_style=False)
        elif pkg_type == 'conda':
            repo_list[pkg_type] = []
            for repo in merged_sets:
                file_name = f'{repo}.yml'
                file_path = os.path.join(dep_dir, file_name)
                with open(file_path, 'w') as f:
                    d = {file_name: sorted(list(merged_sets[repo]),
                                           key=str.lower)}
                    repo_list[pkg_type].append({"path": file_path,
                                                "filename": file_name,
                                                "hash": d})
                    yaml.dump(d, f, indent=4, default_flow_style=False)
        elif pkg_type == 'pip':
            repo_list[pkg_type] = []
            for repo in merged_sets:
                # All pip packages land in a single pypi.yml file.
                file_name = 'pypi.yml'
                file_path = os.path.join(dep_dir, file_name)
                with open(file_path, 'w') as f:
                    d = {file_name: sorted(merged_sets[repo], key=str.lower)}
                    repo_list[pkg_type].append({"path": file_path,
                                                "filename": file_name,
                                                "hash": d})
                    yaml.dump(d, f, indent=4, default_flow_style=False)
        return repo_list

    def get_repo_list(pkgs, pkg_type):
        """Return the unique repo names referenced by a package list."""
        repo_list = []
        if pkg_type == 'yum':
            for pkg in pkgs:
                repo = get_pkg_repo(pkg, pkg_type)
                if repo not in repo_list:
                    repo_list.append(repo)
        if pkg_type == 'conda':
            for pkg in pkgs:
                repo = get_pkg_repo(pkg, pkg_type)
                if repo not in repo_list:
                    repo_list.append(repo)
        if pkg_type == 'pip':
            for pkg in pkgs:
                # Only conda-env lines tagged '<pip>' are pip packages.
                if '<pip>' in pkg:
                    repo = get_pkg_repo(pkg, pkg_type)
                    if repo not in repo_list:
                        repo_list.append(repo)
        return repo_list

    def merge_function(pre_files, post_files, pkg_type):
        """ Merges packages of a given type listed in a collection of files
        collected 'post' installation and 'pre' installation for various
        environments. The merged set of 'pre' packages is removed from the
        merge set of 'post' packages to arrive at the list of installed
        packages across all environments.
        """
        # generate pre paths
        pre_paths = []
        for file in pre_files:
            pre_paths.append(os.path.join(dep_dir, file))
        # Generate post paths
        post_paths = []
        for file in post_files:
            post_paths.append(os.path.join(dep_dir, file))

        # Loop through the files
        pkgs = {}  # {file: {repo: {pre: [], post: []}}}
        for i, pre_file in enumerate(pre_paths):
            file_name = os.path.basename(pre_file)
            file_key = (file_name.split('_')[0] + '_' +
                        file_name.split('_')[1])
            pkgs[file_key] = {}
            post_file = post_paths[i]
            try:
                with open(pre_file, 'r') as f:
                    pre_pkgs = f.read().splitlines()
            except FileNotFoundError as exc:
                print(f'File not found: {pre_file}. Err: {exc}')
            try:
                with open(post_file, 'r') as f:
                    post_pkgs = f.read().splitlines()
            except FileNotFoundError as exc:
                print(f'File not found: {post_file}. Err: {exc}')

            # Get the repo list
            repo_list = get_repo_list(post_pkgs, pkg_type)
            for repo in repo_list:
                pkgs[file_key][repo] = {}
                pkgs[file_key][repo]['pre'] = []
                pkgs[file_key][repo]['post'] = []
                for pkg in pre_pkgs:
                    pkg_fmt_name, pkg_repo = format_pkg_name(pkg, pkg_type)
                    if pkg_repo == repo:
                        pkgs[file_key][repo]['pre'].append(pkg_fmt_name)
                for pkg in post_pkgs:
                    # Format the name
                    pkg_fmt_name, pkg_repo = format_pkg_name(pkg, pkg_type)
                    if pkg_repo == repo:
                        pkgs[file_key][repo]['post'].append(pkg_fmt_name)

        diff_sets = {}
        # Post - pre pkg sets. (may need adjustment for different repo type)
        for _file in pkgs:
            diff_sets[_file] = {}
            for repo in pkgs[_file]:
                # NOTE(review): the pre-set subtraction is intentionally
                # commented out in the original source; the 'post' set is
                # used as-is.
                post_minus_pre = set(pkgs[_file][repo]['post'])  # - \
                #     set(pkgs[_file][repo]['pre']))
                diff_sets[_file][repo] = post_minus_pre

        # Merge by repository
        merged_sets = {}
        for _file in diff_sets:
            for repo in diff_sets[_file]:
                if repo not in merged_sets:
                    merged_sets[repo] = set()
                merged_sets[repo] = merged_sets[repo] | diff_sets[_file][repo]

        return write_merged_files(merged_sets, pkg_type)

    file_check(yum_pre_files)
    file_check(yum_post_files)
    main_repo_list = merge_function(yum_pre_files, yum_post_files, 'yum')

    file_check(conda_pre_files)
    file_check(conda_post_files)
    conda_repo_list = merge_function(conda_pre_files, conda_post_files,
                                     'conda')
    merge_dicts(conda_repo_list, main_repo_list)

    file_check(pip_pre_files)
    file_check(pip_post_files)
    pip_repo_list = merge_function(pip_pre_files, pip_post_files, 'pip')
    merge_dicts(pip_repo_list, main_repo_list)

    software_type = args.software if user_input else None
    proc_family = ""
    if software_type:
        # Best effort lookup of the processor family from the software
        # config; fall back to an empty suffix on any failure.
        try:
            file_path = GEN_SOFTWARE_PATH + SOFT_FILE
            yaml_file = load_yamlfile(file_path)
            proc_family = "_" + yaml_file["proc_family"]
        except Exception:
            # Fix: was a bare 'except:' (caught SystemExit/KeyboardInterrupt
            # too) followed by a redundant 'pass'.
            proc_family = ""

    lists, arch = parse_pkg_list(main_repo_list, software_type, proc_family)
    generate_pkg_list(lists, software_type, arch, dep_dir)
def get_ansible_inventory():
    """Interactively determine which Ansible inventory file to use.

    Prefers a populated dynamic inventory (currently disabled, see NOTE
    below); otherwise walks the user through finding/creating/validating
    the 'software_hosts' static inventory file under the playbooks path.

    Returns:
        str: Path of the chosen inventory file. Exits the program if no
        inventory is selected.
    """
    log = logger.getlogger()
    inventory_choice = None
    dynamic_inventory_path = get_dynamic_inventory_path()
    software_hosts_file_path = (
        os.path.join(get_playbooks_path(), 'software_hosts'))

    heading1("Software hosts inventory setup\n")

    # NOTE(review): dynamic_inventory is hard-set to None and never
    # reassigned, so the branch below can never run — the dynamic
    # inventory feature appears deliberately disabled here.
    dynamic_inventory = None

    # If dynamic inventory contains clients prompt user to use it
    if (dynamic_inventory is not None and
            len(set(_get_hosts_list(dynamic_inventory)) -
                set(['deployer', 'localhost'])) > 0):
        print("Ansible Dynamic Inventory found:")
        print("--------------------------------")
        print(_get_groups_hosts_string(dynamic_inventory))
        print("--------------------------------")
        validate_software_inventory(dynamic_inventory)
        if click.confirm('Do you want to use this inventory?'):
            print("Using Ansible Dynamic Inventory")
            inventory_choice = dynamic_inventory_path
        else:
            print("NOT using Ansible Dynamic Inventory")

    # If dynamic inventory has no hosts or user declines to use it
    if inventory_choice is None:
        while True:
            # Check if software inventory file exists
            if os.path.isfile(software_hosts_file_path):
                print("Software inventory file found at '{}':"
                      .format(software_hosts_file_path))
            # If no software inventory file exists create one using template
            else:
                rlinput("Press enter to create client node inventory")
                _create_new_software_inventory(software_hosts_file_path)

            # If still no software inventory file exists prompt user to
            # exit (else start over to create one).
            if not os.path.isfile(software_hosts_file_path):
                print("No inventory file found at '{}'"
                      .format(software_hosts_file_path))
                if click.confirm('Do you want to exit the program?'):
                    sys.exit(1)
                else:
                    continue

            # Menu items can be modified to show validation results
            continue_msg = 'Continue with current inventory'
            edit_msg = 'Edit inventory file'
            exit_msg = 'Exit program'
            ssh_config_msg = 'Configure Client Nodes for SSH Key Access'
            menu_items = []

            # Validate software inventory
            inv_count = len(_validate_inventory_count(software_hosts_file_path,
                                                      0))
            print(f'Validating software inventory ({inv_count} nodes)...')
            if validate_software_inventory(software_hosts_file_path):
                print(bold("Validation passed!"))
            else:
                # On failed validation, warn on the continue item and offer
                # the SSH key setup entry as a likely fix.
                print(bold("Unable to complete validation"))
                continue_msg = ("Continue with inventory as-is - "
                                "WARNING: Validation incomplete")
                menu_items.append(ssh_config_msg)

            # Prompt user
            menu_items += [continue_msg, edit_msg, exit_msg]
            choice, item = get_selection(menu_items)
            print(f'Choice: {choice} Item: {item}')

            if item == ssh_config_msg:
                configure_ssh_keys(software_hosts_file_path)
            elif item == continue_msg:
                print("Using '{}' as inventory"
                      .format(software_hosts_file_path))
                inventory_choice = software_hosts_file_path
                break
            elif item == edit_msg:
                click.edit(filename=software_hosts_file_path)
            elif item == exit_msg:
                sys.exit(1)

    if inventory_choice is None:
        log.error("Software inventory file is required to continue!")
        sys.exit(1)
    log.debug("User software inventory choice: {}".format(inventory_choice))

    return inventory_choice
def configure_ssh_keys(software_hosts_file_path):
    """Configure SSH keys for Ansible software hosts

    Scan for SSH key pairs in home directory, and if called using
    'sudo' also in "login" user's home directory. Allow user to create
    a new SSH key pair if 'default_ssh_key_name' doesn't already exist.
    If multiple choices are available user will be prompted to choose.
    Selected key pair is copied into "login" user's home '.ssh'
    directory if necessary. Selected key pair is then copied to all
    hosts listed in 'software_hosts' file via 'ssh-copy-id', and
    finally assigned to the 'ansible_ssh_private_key_file' var in
    the 'software_hosts' '[all:vars]' section.

    Args:
        software_hosts_file_path (str): Path to software inventory file
    """
    log = logger.getlogger()
    default_ssh_key_name = "powerup"

    ssh_key_options = get_existing_ssh_key_pairs(no_root_keys=True)

    user_name, user_home_dir = get_user_and_home()
    # Offer key creation only when the default 'powerup' key pair does not
    # already exist in the login user's ~/.ssh directory.
    if os.path.join(user_home_dir, ".ssh",
                    default_ssh_key_name) not in ssh_key_options:
        ssh_key_options.insert(0, 'Create New "powerup" Key Pair')

    if len(ssh_key_options) == 1:
        item = ssh_key_options[0]
    elif len(ssh_key_options) > 1:
        print(bold("\nSelect an SSH key to use:"))
        choice, item = get_selection(ssh_key_options)

    if item == 'Create New "powerup" Key Pair':
        ssh_key = create_ssh_key_pair(default_ssh_key_name)
    else:
        ssh_key = item

    ssh_key = copy_ssh_key_pair_to_user_dir(ssh_key)

    add_software_hosts_global_var(
        software_hosts_file_path,
        "ansible_ssh_common_args='-o StrictHostKeyChecking=no'")

    hostvars = get_ansible_hostvars(software_hosts_file_path)

    run = True
    while run:
        # Prompt once for shared credentials, only for hosts that do not
        # already define them as host vars in the inventory.
        global_user = None
        global_pass = None
        header_printed = False
        header_msg = bold('\nGlobal client SSH login credentials required')
        for host in _validate_inventory_count(software_hosts_file_path, 0):
            if global_user is None and 'ansible_user' not in hostvars[host]:
                print(header_msg)
                header_printed = True
                # NOTE(review): the remainder of this statement appears
                # redacted ('******') in the source — presumably
                # rlinput('username: ') followed by a separate
                # add_software_hosts_global_var(...,
                # f'ansible_user={global_user}') call. Confirm against the
                # original repository before relying on this code.
                global_user = rlinput('username: '******'ansible_user={global_user}')
            if (global_pass is None and 'ansible_ssh_pass' not
                    in hostvars[host]):
                if not header_printed:
                    print(header_msg)
                # NOTE(review): also redacted ('******') — presumably
                # getpass('password: ') followed by the ssh-copy-id loop and
                # a get_selection(['Retry', 'Continue', 'Exit']) prompt on
                # failure. Confirm against the original repository.
                global_pass = getpass('password: '******'Retry', 'Continue',
                                      'Exit'])
        if choice == "1":
            # Retry: loop again with run still True.
            pass
        elif choice == "2":
            run = False
        elif choice == "3":
            log.debug('User chooses to exit.')
            sys.exit('Exiting')
        else:
            print()
            log.info("SSH key successfully copied to all hosts\n")
            run = False

    add_software_hosts_global_var(
        software_hosts_file_path,
        f'ansible_ssh_private_key_file={ssh_key}')
def setup_source_file(name, src_glob, url='', alt_url='http://',
                      dest_dir=None, src2=None):
    """Interactive selection of a source file and copy it to the
    /srv/<dest_dir> directory. The source file can include file globs and
    can come from a URL or the local disk. Local disk searching starts in
    the /home and /root directory and then expands to the entire file
    system if no matches found in those directories. URLs must point to
    the directory with the file or a parent directory.

    Inputs:
        src_glob (str): Source file name to look for. Can include file
            globs
        src2(str): An additional file to be copied from the same source
            as src_glob. This file would typically be a support file such
            as an entitlement file.
        dest_dir (str): destination directory. Will be created if
            necessary under /srv/
        url (str): url for the public web site where the file can be
            obtained. Leave empty to prevent prompting for a public url
            option.
        alt_url (str): Alternate url where the file can be found. Usually
            this is an intranet web site.
        name (str): Name for the source. Used for prompts and dest dir
            (/srv/{name}).

    Returns:
        (src_path, dest_path, copied) tuple:
        src_path (str): The path (or URL) of the file found / chosen by
            the user, or None if none was chosen.
        dest_path (str): Path of the file under /srv/, or the path of a
            pre-existing match if nothing new was copied.
        copied (bool): True if a file matching src_glob was successfully
            copied into the dest directory; False otherwise (including
            when the user declines to copy).
    """
    src_path = None
    dest_path = None
    log = logger.getlogger()
    name_src = get_name_dir(name)
    # A pre-existing match under /srv/ becomes the default dest_path.
    exists = glob.glob(f'/srv/{name_src}/**/{src_glob}', recursive=True)
    if exists:
        dest_path = exists[0]
    copied = False
    ch = ''
    while not copied:
        ch, item = get_selection('Copy from URL\nSearch local Disk', 'U\nD',
                                 allow_none=True)
        if ch == 'U':
            _url = alt_url if alt_url else 'http://'
            if url:
                ch1, item = get_selection('Public web site.Alternate web site',
                                          'P.A', 'Select source: ', '.')
                if ch1 == 'P':
                    _url = url
            # Re-prompt for the url until the download succeeds or the
            # user gives up (get_url returns None).
            rc = -9
            while _url is not None and rc != 0:
                _url = get_url(_url, fileglob=src_glob)
                if _url:
                    dest_dir = f'/srv/{name_src}'
                    if not os.path.exists(dest_dir):
                        os.mkdir(dest_dir)
                    cmd = (f'wget -r -l 1 -nH -np --cut-dirs=1 -P '
                           f'{dest_dir} {_url}')
                    rc = sub_proc_display(cmd)
                    if rc != 0:
                        log.error(f'Failed downloading {name} source to'
                                  f' /srv/{name_src}/ directory. \n{rc}')
                        copied = False
                    else:
                        src_path = _url
                        dest_path = os.path.join(dest_dir,
                                                 os.path.basename(_url))
                        copied = True
                    if src2:
                        # Fetch the companion file from the same location;
                        # 'copied and True' keeps copied False if the main
                        # download above failed.
                        _url2 = os.path.join(os.path.dirname(_url), src2)
                        cmd = (f'wget -r -l 1 -nH -np --cut-dirs=1 -P '
                               f'{dest_dir} {_url2}')
                        rc = sub_proc_display(cmd)
                        if rc != 0:
                            log.error(
                                f'Failed downloading {name} source file '
                                f'{src2} to'
                                f' /srv/{name_src}/ directory. \n{rc}')
                            copied = False
                        else:
                            src_path = _url
                            copied = copied and True
        elif ch == 'D':
            src_path = get_src_path(src_glob)
            if src_path:
                dest_dir = f'/srv/{name_src}'
                if not os.path.exists(dest_dir):
                    os.mkdir(dest_dir)
                try:
                    copy2(src_path, dest_dir)
                except Error as err:
                    log.debug(
                        f'Failed copying {name} source file to '
                        f'/srv/{name_src}/ '
                        f'directory. \n{err}')
                    copied = False
                else:
                    log.info(f'Successfully installed {name} source file '
                             'into the POWER-Up software server.')
                    dest_path = os.path.join(dest_dir,
                                             os.path.basename(src_path))
                    copied = True
                if src2:
                    # Copy the companion file from the same directory as
                    # the main source file.
                    try:
                        src2_path = os.path.join(os.path.dirname(src_path),
                                                 src2)
                        copy2(src2_path, dest_dir)
                    except Error as err:
                        log.debug(
                            f'Failed copying {name} source file to '
                            f'/srv/{name_src}/ '
                            f'directory. \n{err}')
                        copied = False
                    else:
                        log.info(
                            f'Successfully installed {name} source file '
                            f'{src2} '
                            'into the POWER-Up software server.')
                        copied = copied and True
        elif ch == 'N':
            # User chose 'none' at the selection prompt.
            log.info(f'No {name.capitalize()} source file copied to '
                     'POWER-Up server directory')
            break

    return src_path, dest_path, copied