def path_dwim_relative_stack(self, paths, dirname, source):
    '''
    find one file in first path in stack taking roles into account and adding play basedir as fallback

    :arg paths: A list of text strings which are the paths to look for the filename in.
    :arg dirname: A text string representing a directory.  The directory
        is prepended to the source to form the path to search for.
    :arg source: A text string which is the filename to search for
    :rtype: A text string
    :returns: An absolute path to the filename ``source``
    '''
    b_dirname = to_bytes(dirname)
    b_source = to_bytes(source)

    result = None
    if source is None:
        display.warning('Invalid request to find a file that matches a "null" value')
    elif source and (source.startswith('~') or source.startswith(os.path.sep)):
        # path is absolute, no relative needed, check existence and return source
        test_path = unfrackpath(b_source)
        if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
            result = test_path
    else:
        search = []
        for path in paths:
            upath = unfrackpath(path)
            b_upath = to_bytes(upath, errors='surrogate_or_strict')
            b_mydir = os.path.dirname(b_upath)

            # if path is in role and 'tasks' not there already, add it into the search
            if b_upath.endswith(b'tasks') and os.path.exists(os.path.join(b_upath, b'main.yml')) \
                    or os.path.exists(os.path.join(b_upath, b'tasks/main.yml')) \
                    or os.path.exists(os.path.join(b_mydir, b'tasks/main.yml')):
                if b_mydir.endswith(b'tasks'):
                    search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                    search.append(os.path.join(b_mydir, b_source))
                else:
                    # don't add dirname if user already is using it in source
                    if b_source.split(b'/')[0] == b_dirname:
                        search.append(os.path.join(b_upath, b_source))
                    else:
                        search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b'tasks', b_source))
            elif b_dirname not in b_source.split(b'/'):
                # don't add dirname if user already is using it in source
                search.append(os.path.join(b_upath, b_dirname, b_source))
                search.append(os.path.join(b_upath, b_source))

        # always append basedir as last resort
        search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
        search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

        display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
        for b_candidate in search:
            display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
            if os.path.exists(b_candidate):
                result = to_text(b_candidate)
                break

    return result
def _load_role_path(self, role_name):
    '''
    the 'role', as specified in the ds (or as a bare string), can either
    be a simple name or a full path. If it is a full path, we use the
    basename as the role name, otherwise we take the name as-given and
    append it to the default role path
    '''

    role_path = unfrackpath(role_name)
    if self._loader.path_exists(role_path):
        role_name = os.path.basename(role_name)
        return (role_name, role_path)
    else:
        # we always start the search for roles in the base directory of the playbook
        role_search_paths = [
            os.path.join(self._loader.get_basedir(), u'roles'),
            u'./roles',
            self._loader.get_basedir(),
            u'./',
        ]

        # also search in the configured roles path
        if C.DEFAULT_ROLES_PATH:
            configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep)
            role_search_paths.extend(configured_paths)

        # finally, append the roles basedir, if it was set, so we can
        # search relative to that directory for dependent roles
        if self._role_basedir:
            role_search_paths.append(self._role_basedir)

        # create a templar class to template the dependency names, in
        # case they contain variables
        if self._variable_manager is not None:
            all_vars = self._variable_manager.get_vars(loader=self._loader, play=self._play)
        else:
            all_vars = dict()

        templar = Templar(loader=self._loader, variables=all_vars)
        role_name = templar.template(role_name)

        # now iterate through the possible paths and return the first one we find
        for path in role_search_paths:
            path = templar.template(path)
            role_path = unfrackpath(os.path.join(path, role_name))
            if self._loader.path_exists(role_path):
                return (role_name, role_path)

    # FIXME: make the parser smart about list/string entries in
    #        the yaml so the error line/file can be reported here
    raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)))
def unfrack_paths(option, opt, value, parser):
    paths = getattr(parser.values, option.dest)
    if paths is None:
        paths = []

    if isinstance(value, string_types):
        paths[:0] = [unfrackpath(x) for x in value.split(os.pathsep) if x]
    elif isinstance(value, list):
        paths[:0] = [unfrackpath(x) for x in value if x]
    else:
        pass  # FIXME: should we raise options error?

    setattr(parser.values, option.dest, paths)
def _load_role_path(self, role_name):
    '''
    the 'role', as specified in the ds (or as a bare string), can either
    be a simple name or a full path. If it is a full path, we use the
    basename as the role name, otherwise we take the name as-given and
    append it to the default role path
    '''

    # we always start the search for roles in the base directory of the playbook
    role_search_paths = [
        os.path.join(self._loader.get_basedir(), u'roles'),
    ]

    # also search in the configured roles path
    if C.DEFAULT_ROLES_PATH:
        role_search_paths.extend(C.DEFAULT_ROLES_PATH)

    # next, append the roles basedir, if it was set, so we can
    # search relative to that directory for dependent roles
    if self._role_basedir:
        role_search_paths.append(self._role_basedir)

    # finally as a last resort we look in the current basedir as set
    # in the loader (which should be the playbook dir itself) but without
    # the roles/ dir appended
    role_search_paths.append(self._loader.get_basedir())

    # create a templar class to template the dependency names, in
    # case they contain variables
    if self._variable_manager is not None:
        all_vars = self._variable_manager.get_vars(play=self._play)
    else:
        all_vars = dict()

    templar = Templar(loader=self._loader, variables=all_vars)
    role_name = templar.template(role_name)

    # now iterate through the possible paths and return the first one we find
    for path in role_search_paths:
        path = templar.template(path)
        role_path = unfrackpath(os.path.join(path, role_name))
        if self._loader.path_exists(role_path):
            return (role_name, role_path)

    # if not found elsewhere try to extract path from name
    role_path = unfrackpath(role_name)
    if self._loader.path_exists(role_path):
        role_name = os.path.basename(role_name)
        return (role_name, role_path)

    raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)), obj=self._ds)
def path_dwim_relative(self, path, dirname, source):
    '''
    find one file in either a role or playbook dir with or without
    explicitly named dirname subdirs

    Used in action plugins and lookups to find supplemental files that
    could be in either place.
    '''

    search = []
    isrole = False

    # I have full path, nothing else needs to be looked at
    if source.startswith('~') or source.startswith('/'):
        search.append(self.path_dwim(source))
    else:
        # base role/play path + templates/files/vars + relative filename
        search.append(os.path.join(path, dirname, source))
        basedir = unfrackpath(path)

        # is it a role and if so make sure you get correct base path
        if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path, 'main.yml'), errors='strict')) \
                or os.path.exists(to_bytes(os.path.join(path, 'tasks/main.yml'), errors='strict')):
            isrole = True
            if path.endswith('tasks'):
                basedir = unfrackpath(os.path.dirname(path))

        cur_basedir = self._basedir
        self.set_basedir(basedir)
        # resolved base role/play path + templates/files/vars + relative filename
        search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
        self.set_basedir(cur_basedir)

        if isrole and not source.endswith(dirname):
            # look in role's tasks dir w/o dirname
            search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))

        # try to create absolute path for loader basedir + templates/files/vars + filename
        search.append(self.path_dwim(os.path.join(dirname, source)))
        search.append(self.path_dwim(os.path.join(basedir, source)))

        # try to create absolute path for loader basedir + filename
        search.append(self.path_dwim(source))

    for candidate in search:
        if os.path.exists(to_bytes(candidate, errors='strict')):
            break

    return candidate
def path_dwim_relative(self, role_path, dirname, source):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''

    basedir = os.path.dirname(role_path)
    if os.path.islink(basedir):
        basedir = unfrackpath(basedir)
        template2 = os.path.join(basedir, dirname, source)
    else:
        template2 = os.path.join(basedir, '..', dirname, source)

    source1 = os.path.join(role_path, dirname, source)
    if os.path.exists(source1):
        return source1

    cur_basedir = self._basedir
    self.set_basedir(basedir)
    source2 = self.path_dwim(template2)
    if os.path.exists(source2):
        self.set_basedir(cur_basedir)
        return source2

    obvious_local_path = self.path_dwim(source)
    if os.path.exists(obvious_local_path):
        self.set_basedir(cur_basedir)
        return obvious_local_path

    self.set_basedir(cur_basedir)
    return source2  # which does not exist
def run(self):
    super(ConfigCLI, self).run()

    if self.options.config_file:
        self.config_file = unfrackpath(self.options.config_file, follow=False)
        self.config = ConfigManager(self.config_file)
    else:
        self.config = ConfigManager()
        self.config_file = find_ini_config_file()

    if self.config_file:
        try:
            if not os.path.exists(self.config_file):
                raise AnsibleOptionsError("%s does not exist or is not accessible" % (self.config_file))
            elif not os.path.isfile(self.config_file):
                raise AnsibleOptionsError("%s is not a valid file" % (self.config_file))

            os.environ['ANSIBLE_CONFIG'] = to_native(self.config_file)
        except Exception:
            if self.action in ['view']:
                raise
            elif self.action in ['edit', 'update']:
                display.warning("File does not exist, used empty file: %s" % self.config_file)
    elif self.action == 'view':
        raise AnsibleError('Invalid or no config file was supplied')

    self.execute()
def path_dwim_relative_stack(self, paths, dirname, source):
    '''
    find one file in first path in stack taking roles into account and adding play basedir as fallback
    '''

    result = None
    if not source:
        display.warning('Invalid request to find a file that matches an empty string or "null" value')
    elif source.startswith('~') or source.startswith(os.path.sep):
        # path is absolute, no relative needed, check existence and return source
        test_path = to_bytes(unfrackpath(source), errors='strict')
        if os.path.exists(test_path):
            result = test_path
    else:
        search = []
        for path in paths:
            upath = unfrackpath(path)
            mydir = os.path.dirname(upath)

            # if path is in role and 'tasks' not there already, add it into the search
            if upath.endswith('tasks') and os.path.exists(to_bytes(os.path.join(upath, 'main.yml'), errors='strict')) \
                    or os.path.exists(to_bytes(os.path.join(upath, 'tasks/main.yml'), errors='strict')) \
                    or os.path.exists(to_bytes(os.path.join(os.path.dirname(upath), 'tasks/main.yml'), errors='strict')):
                if mydir.endswith('tasks'):
                    search.append(os.path.join(os.path.dirname(mydir), dirname, source))
                    search.append(os.path.join(mydir, source))
                else:
                    search.append(os.path.join(upath, dirname, source))
                    search.append(os.path.join(upath, 'tasks', source))
            elif dirname not in source.split('/'):
                # don't add dirname if user already is using it in source
                search.append(os.path.join(upath, dirname, source))
                search.append(os.path.join(upath, source))

        # always append basedir as last resort
        search.append(os.path.join(self.get_basedir(), dirname, source))
        search.append(os.path.join(self.get_basedir(), source))

        display.debug('search_path:\n\t' + '\n\t'.join(search))
        for candidate in search:
            display.vvvvv('looking for "%s" at "%s"' % (source, candidate))
            if os.path.exists(to_bytes(candidate, errors='strict')):
                result = candidate
                break

    return result
def _update_connection_state(self):
    '''
    Reconstruct the connection socket_path and check if it exists

    If the socket path exists then the connection is active and set
    both the _socket_path value to the path and the _connected value
    to True.  If the socket path doesn't exist, leave the socket path
    value to None and the _connected value to False
    '''
    ssh = connection_loader.get('ssh', class_only=True)
    cp = ssh._create_control_path(self._play_context.remote_addr, self._play_context.port,
                                  self._play_context.remote_user, self._play_context.connection,
                                  self._ansible_playbook_pid)

    tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
    socket_path = unfrackpath(cp % dict(directory=tmp_path))

    if os.path.exists(socket_path):
        self._connected = True
        self._socket_path = socket_path
def _update_connection_state(self):
    '''
    Reconstruct the connection socket_path and check if it exists

    If the socket path exists then the connection is active and set
    both the _socket_path value to the path and the _connected value
    to True.  If the socket path doesn't exist, leave the socket path
    value to None and the _connected value to False
    '''
    ssh = connection_loader.get('ssh', class_only=True)
    control_path = ssh._create_control_path(self._play_context.remote_addr, self._play_context.port,
                                            self._play_context.remote_user, self._play_context.connection,
                                            self._ansible_playbook_pid)

    tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR)
    socket_path = unfrackpath(control_path % dict(directory=tmp_path))

    if os.path.exists(socket_path):
        self._connected = True
        self._socket_path = socket_path
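The `%(directory)s` substitution above is plain printf-style string templating. A minimal standalone sketch with a hypothetical control-path template (the real template comes from `ssh._create_control_path()`):

import os

# hypothetical template in the shape _create_control_path() returns;
# '%%h' keeps a literal '%h' for ssh itself to expand later
control_path = '%(directory)s/ansible-ssh-%%h-%%p-%%r'

tmp_path = os.path.expanduser('~/.ansible/pc')
socket_path = control_path % dict(directory=tmp_path)
# -> '/home/<user>/.ansible/pc/ansible-ssh-%h-%p-%r'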
def find_ini_config_file(self):
    ''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''

    path0 = os.getenv("ANSIBLE_CONFIG", None)
    if path0 is not None:
        path0 = unfrackpath(path0, follow=False)
        if os.path.isdir(path0):
            path0 += "/ansible.cfg"
    try:
        path1 = os.getcwd() + "/ansible.cfg"
    except OSError:
        path1 = None
    path2 = unfrackpath("~/.ansible.cfg", follow=False)
    path3 = "/etc/ansible/ansible.cfg"

    for path in [path0, path1, path2, path3]:
        if path is not None and os.path.exists(path):
            break
    else:
        path = None

    return path
def path_dwim_relative(self, path, dirname, source, is_role=False):
    '''
    find one file in either a role or playbook dir with or without
    explicitly named dirname subdirs

    Used in action plugins and lookups to find supplemental files that
    could be in either place.
    '''

    search = []
    source = to_text(source, errors='surrogate_or_strict')

    # I have full path, nothing else needs to be looked at
    if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'):
        search.append(unfrackpath(source, follow=False))
    else:
        # base role/play path + templates/files/vars + relative filename
        search.append(os.path.join(path, dirname, source))
        basedir = unfrackpath(path, follow=False)

        # not told if role, but detect if it is a role and if so make sure you get correct base path
        if not is_role:
            is_role = self._is_role(path)

        if is_role and RE_TASKS.search(path):
            basedir = unfrackpath(os.path.dirname(path), follow=False)

        cur_basedir = self._basedir
        self.set_basedir(basedir)
        # resolved base role/play path + templates/files/vars + relative filename
        search.append(unfrackpath(os.path.join(basedir, dirname, source), follow=False))
        self.set_basedir(cur_basedir)

        if is_role and not source.endswith(dirname):
            # look in role's tasks dir w/o dirname
            search.append(unfrackpath(os.path.join(basedir, 'tasks', source), follow=False))

        # try to create absolute path for loader basedir + templates/files/vars + filename
        search.append(unfrackpath(os.path.join(dirname, source), follow=False))

        # try to create absolute path for loader basedir
        search.append(unfrackpath(os.path.join(basedir, source), follow=False))

        # try to create absolute path for dirname + filename
        search.append(self.path_dwim(os.path.join(dirname, source)))

        # try to create absolute path for filename
        search.append(self.path_dwim(source))

    for candidate in search:
        if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
            break

    return candidate
def find_ini_config_file():
    ''' Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
    # FIXME: eventually deprecate ini configs

    path0 = os.getenv("ANSIBLE_CONFIG", None)
    if path0 is not None:
        path0 = unfrackpath(path0, follow=False)
        if os.path.isdir(path0):
            path0 += "/ansible.cfg"
    try:
        path1 = os.getcwd() + "/ansible.cfg"
    except OSError:
        path1 = None
    path2 = unfrackpath("~/.ansible.cfg", follow=False)
    path3 = "/etc/ansible/ansible.cfg"

    for path in [path0, path1, path2, path3]:
        if path is not None and os.path.exists(path):
            break
    else:
        path = None

    return path
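A sketch of the precedence this function implements; the directory shown is hypothetical:

import os

# 1. ANSIBLE_CONFIG wins if set (a directory gets 'ansible.cfg' appended)
os.environ['ANSIBLE_CONFIG'] = '/opt/project'   # hypothetical directory
# 2. otherwise <cwd>/ansible.cfg
# 3. otherwise ~/.ansible.cfg
# 4. otherwise /etc/ansible/ansible.cfg
# find_ini_config_file() would return '/opt/project/ansible.cfg' here,
# provided that file actually exists; None if nothing is found at all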
def _load_role_path(self, role_name):
    '''
    the 'role', as specified in the ds (or as a bare string), can either
    be a simple name or a full path. If it is a full path, we use the
    basename as the role name, otherwise we take the name as-given and
    append it to the default role path
    '''

    role_path = unfrackpath(role_name)

    if self._loader.path_exists(role_path):
        role_name = os.path.basename(role_name)
        return (role_name, role_path)
    else:
        # we always start the search for roles in the base directory of the playbook
        role_search_paths = [os.path.join(self._loader.get_basedir(), 'roles'), './roles', './']

        # also search in the configured roles path
        if C.DEFAULT_ROLES_PATH:
            configured_paths = C.DEFAULT_ROLES_PATH.split(os.pathsep)
            role_search_paths.extend(configured_paths)

        # finally, append the roles basedir, if it was set, so we can
        # search relative to that directory for dependent roles
        if self._role_basedir:
            role_search_paths.append(self._role_basedir)

        # now iterate through the possible paths and return the first one we find
        for path in role_search_paths:
            role_path = unfrackpath(os.path.join(path, role_name))
            if self._loader.path_exists(role_path):
                return (role_name, role_path)

    # FIXME: make the parser smart about list/string entries in
    #        the yaml so the error line/file can be reported here
    raise AnsibleError("the role '%s' was not found" % role_name)
def path_dwim(self, given):
    '''
    make relative paths work like folks expect.
    '''

    given = unquote(given)
    given = to_text(given, errors='surrogate_or_strict')

    if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'):
        path = given
    else:
        basedir = to_text(self._basedir, errors='surrogate_or_strict')
        path = os.path.join(basedir, given)

    return unfrackpath(path, follow=False)
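A behavior sketch for path_dwim(), assuming a hypothetical loader whose _basedir is '/srv/playbooks': relative names are joined to the basedir, while absolute and '~'-anchored names bypass it, and everything is normalized by unfrackpath():

loader.path_dwim('files/foo.txt')   # -> '/srv/playbooks/files/foo.txt'
loader.path_dwim('~/foo.txt')       # -> '/home/<user>/foo.txt'
loader.path_dwim('/etc/hosts')      # -> '/etc/hosts'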
def put_file(self, in_path, out_path):
    ''' transfer a file from local to local '''

    super(Connection, self).put_file(in_path, out_path)

    in_path = unfrackpath(in_path, basedir=self.cwd)
    out_path = unfrackpath(out_path, basedir=self.cwd)

    display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._play_context.remote_addr)
    if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
        raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_native(in_path)))
    try:
        shutil.copyfile(to_bytes(in_path, errors='surrogate_or_strict'),
                        to_bytes(out_path, errors='surrogate_or_strict'))
    except shutil.Error:
        raise AnsibleError("failed to copy: {0} and {1} are the same".format(to_native(in_path), to_native(out_path)))
    except IOError as e:
        raise AnsibleError("failed to transfer file to {0}: {1}".format(to_native(out_path), to_native(e)))
def __init__(self, options, args=None):
    self.options = options
    self.args = args

    if hasattr(self.options, 'tags') and not self.options.tags:
        self.options.tags = ['all']
    if hasattr(self.options, 'tags') and self.options.tags:
        tags = set()
        for tag_set in self.options.tags:
            for tag in tag_set.split(u','):
                tags.add(tag.strip())
        self.options.tags = list(tags)

    # process skip_tags
    if hasattr(self.options, 'skip_tags') and self.options.skip_tags:
        skip_tags = set()
        for tag_set in self.options.skip_tags:
            for tag in tag_set.split(u','):
                skip_tags.add(tag.strip())
        self.options.skip_tags = list(skip_tags)

    # process inventory options except for CLIs that require their own processing
    if hasattr(self.options, 'inventory') and not self.SKIP_INVENTORY_DEFAULTS:

        if self.options.inventory:
            # should always be list
            if isinstance(self.options.inventory, string_types):
                self.options.inventory = [self.options.inventory]

            # Ensure full paths when needed
            self.inventory = []
            if ',' not in self.options.inventory:
                for opt in self.options.inventory:
                    self.inventory.append(opt)
            else:
                self.inventory.append(self.options.inventory)

            self.options.inventory = [unfrackpath(opt, follow=False) if ',' not in opt else opt
                                      for opt in self.options.inventory]
        else:
            self.options.inventory = C.DEFAULT_HOST_LIST
            self.inventory = C.DEFAULT_HOST_LIST
def _is_role(self, path):
    ''' imperfect role detection, roles are still valid w/o main.yml/yaml/etc '''

    isit = False
    b_path = to_bytes(path, errors='surrogate_or_strict')
    b_upath = to_bytes(unfrackpath(path), errors='surrogate_or_strict')

    for suffix in (b'.yml', b'.yaml', b''):
        b_main = b'main%s' % (suffix)
        b_tasked = b'tasks/%s' % (b_main)

        if b_path.endswith(b'tasks') and os.path.exists(os.path.join(b_path, b_main)) \
                or os.path.exists(os.path.join(b_upath, b_tasked)) \
                or os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked)):
            isit = True
            break

    return isit
def __init__(self, loader, variable_manager, host_list=C.DEFAULT_HOST_LIST):
    # the host file, or script path, or list of hosts
    # if a list, inventory data will NOT be loaded
    self.host_list = unfrackpath(host_list, follow=False)
    self._loader = loader
    self._variable_manager = variable_manager
    self.localhost = None

    # caching to avoid repeated calculations, particularly with
    # external inventory scripts.
    self._vars_per_host = {}
    self._vars_per_group = {}
    self._hosts_cache = {}
    self._pattern_cache = {}
    self._group_dict_cache = {}
    self._vars_plugins = []

    self._basedir = self.basedir()

    # Contains set of filenames under group_vars directories
    self._group_vars_files = self._find_group_vars_files(self._basedir)
    self._host_vars_files = self._find_host_vars_files(self._basedir)

    # to be set by calling set_playbook_basedir by playbook code
    self._playbook_basedir = None

    # the inventory object holds a list of groups
    self.groups = {}

    # a list of host(names) to contain current inquiries to
    self._restriction = None
    self._subset = None

    # clear the cache here, which is only useful if more than
    # one Inventory objects are created when using the API directly
    self.clear_pattern_cache()
    self.clear_group_dict_cache()

    self.parse_inventory(host_list)
def _is_role(self, path):
    ''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''

    b_path = to_bytes(path, errors='surrogate_or_strict')
    b_upath = to_bytes(unfrackpath(path, follow=False), errors='surrogate_or_strict')

    for b_finddir in (b'meta', b'tasks'):
        for b_suffix in (b'.yml', b'.yaml', b''):
            b_main = b'main%s' % (b_suffix)
            b_tasked = os.path.join(b_finddir, b_main)

            if (RE_TASKS.search(path) and os.path.exists(os.path.join(b_path, b_main))
                    or os.path.exists(os.path.join(b_upath, b_tasked))
                    or os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))):
                return True
    return False
def _is_role(self, path):
    ''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''

    b_path = to_bytes(path, errors='surrogate_or_strict')
    b_upath = to_bytes(unfrackpath(path, follow=False), errors='surrogate_or_strict')

    for b_finddir in (b'meta', b'tasks'):
        for b_suffix in (b'.yml', b'.yaml', b''):
            b_main = b'main%s' % (b_suffix)
            b_tasked = os.path.join(b_finddir, b_main)

            if (
                RE_TASKS.search(path) and os.path.exists(os.path.join(b_path, b_main))
                or os.path.exists(os.path.join(b_upath, b_tasked))
                or os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
            ):
                return True
    return False
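A simplified standalone sketch of the same layout test: a path looks like a role when a main.yml/main.yaml/main file exists under its meta/ or tasks/ subdirectory (the real method also probes the unfracked and parent paths):

import os

def looks_like_role(path):
    for finddir in ('meta', 'tasks'):
        for suffix in ('.yml', '.yaml', ''):
            if os.path.exists(os.path.join(path, finddir, 'main%s' % suffix)):
                return True
    return False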
def find_role_in_content_path(role_name, loader, content_search_paths):
    '''search for role in ~/.ansible/content and return first match.

    return None if no matches'''

    # try the galaxy content paths

    # TODO: this is where a 'role spec resolver' could be plugged into.
    #       The resolver would be responsible for parsing/understanding the role spec
    #       (a formatted string or a dict), and figuring out the appropriate galaxy
    #       namespace, repo name, and role name.
    #
    #       The next step would be finding that role on the fs.
    #       If there are conflicts or ambiguity, the resolver would apply
    #       any rules or convention or precedence to choose the correct role.
    #       For ex, if namespace isn't provided, and 2 or more namespaces have a
    #       role that matches, the resolver would choose.

    # FIXME: mv to method, deindent, return early, etc
    log.debug('content_search_paths: %s', content_search_paths)

    content_rel_role_path = role_name_to_relative_content_path(role_name)
    log.debug('content_rel_role_path: %s', content_rel_role_path)

    # didn't parse the role_name, return None
    if not content_rel_role_path:
        return None

    # TODO: the for loop isn't needed if we really really only
    #       support one content path
    for content_search_path in content_search_paths:
        fq_role_path = os.path.join(content_search_path, content_rel_role_path)
        fq_role_path = unfrackpath(fq_role_path)
        log.debug('fq_role_path: %s', fq_role_path)

        if loader.path_exists(fq_role_path):
            log.info('FOUND: %s at content path "%s"', role_name, fq_role_path)
            return (role_name, fq_role_path)

    return None
def run(self):
    super(ConfigCLI, self).run()

    if context.CLIARGS['config_file']:
        self.config_file = unfrackpath(context.CLIARGS['config_file'], follow=False)
        b_config = to_bytes(self.config_file)
        if os.path.exists(b_config) and os.access(b_config, os.R_OK):
            self.config = ConfigManager(self.config_file)
        else:
            raise AnsibleOptionsError('The provided configuration file is missing or not accessible: %s' % to_native(self.config_file))
    else:
        self.config = C.config
        self.config_file = self.config._config_file

    if self.config_file:
        try:
            if not os.path.exists(self.config_file):
                raise AnsibleOptionsError("%s does not exist or is not accessible" % (self.config_file))
            elif not os.path.isfile(self.config_file):
                raise AnsibleOptionsError("%s is not a valid file" % (self.config_file))

            os.environ['ANSIBLE_CONFIG'] = to_native(self.config_file)
        except Exception:
            if context.CLIARGS['action'] in ['view']:
                raise
            elif context.CLIARGS['action'] in ['edit', 'update']:
                display.warning("File does not exist, used empty file: %s" % self.config_file)
    elif context.CLIARGS['action'] == 'view':
        raise AnsibleError('Invalid or no config file was supplied')

    # run the requested action
    context.CLIARGS['func']()
def parse_sources(self, cache=False): """ iterate over inventory sources and parse each one to populate it""" parsed = False # allow for multiple inventory parsing for source in self._sources: if source: if type(source) == str and ',' not in source and not yaml.safe_load(source): source = unfrackpath(source, follow=False) print('source:::::??', source) parse = self.parse_source(source, cache=cache) if parse and not parsed: parsed = True if parsed: # do post processing self._inventory.reconcile_inventory() else: if C.INVENTORY_UNPARSED_IS_FAILED: raise AnsibleError("No inventory was parsed, please check your configuration and options.") else: logger.error("No inventory was parsed, only implicit localhost is available")
def parse_sources(self, cache=False):
    ''' iterate over inventory sources and parse each one to populate it'''

    parsed = False
    # allow for multiple inventory parsing
    for source in self._sources:

        if source:
            if ',' not in source:
                source = unfrackpath(source, follow=False)
            parse = self.parse_source(source, cache=cache)
            if parse and not parsed:
                parsed = True

    if parsed:
        # do post processing
        self._inventory.reconcile_inventory()
    else:
        if C.INVENTORY_UNPARSED_IS_FAILED:
            raise AnsibleError("No inventory was parsed, please check your configuration and options.")
        else:
            display.warning("No inventory was parsed, only implicit localhost is available")
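A standalone sketch of the dispatch rule above, with hypothetical sources: anything containing a comma is treated as an inline host list and passed through untouched, while everything else is assumed to be a path and normalized first:

from ansible.utils.path import unfrackpath

for source in ['hosts.ini', '~/inventories/prod', 'web1,web2,db1']:
    if ',' not in source:
        source = unfrackpath(source, follow=False)
    print(source)
# the first two become absolute paths; the inline host list is left as-is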
def parse_sources(self, cache=False):
    ''' iterate over inventory sources and parse each one to populate it'''

    self._setup_inventory_plugins()

    parsed = False
    # allow for multiple inventory parsing
    for source in self._sources:

        if source:
            if ',' not in source:
                source = unfrackpath(source, follow=False)
            parse = self.parse_source(source, cache=cache)
            if parse and not parsed:
                parsed = True

    if parsed:
        # do post processing
        self._inventory.reconcile_inventory()
    else:
        display.warning("No inventory was parsed, only implicit localhost is available")

    self._inventory_plugins = []
def parse_sources(self, cache=True):
    ''' iterate over inventory sources and parse each one to populate it'''

    self._setup_inventory_plugins()

    parsed = False
    # allow for multiple inventory parsing
    for source in self._sources:

        if source:
            if ',' not in source:
                source = unfrackpath(source, follow=False)
            parse = self.parse_source(source, cache=cache)
            if parse and not parsed:
                parsed = True

    if parsed:
        # do post processing
        self._inventory.reconcile_inventory()
    else:
        display.warning("No inventory was parsed, only implicit localhost is available")

    self._inventory_plugins = []
def shutdown(self):
    """ Shuts down the local domain socket """
    lock_path = unfrackpath("%s/.ansible_pc_lock_%s" % os.path.split(self.socket_path))
    if os.path.exists(self.socket_path):
        try:
            if self.sock:
                self.sock.close()
            if self.connection:
                self.connection.close()
                display_messages(self.connection)
        except Exception:
            pass
        finally:
            if os.path.exists(self.socket_path):
                os.remove(self.socket_path)
                setattr(self.connection, '_socket_path', None)
                setattr(self.connection, '_connected', False)

    if os.path.exists(lock_path):
        os.remove(lock_path)

    display.display('shutdown complete', log_only=True)
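The lock-path expression above relies on os.path.split() returning a (head, tail) tuple, which fills both %s slots at once; a sketch with a hypothetical socket path:

import os

socket_path = '/home/user/.ansible/pc/ansible-ssh-host-22-user'  # hypothetical
lock_path = "%s/.ansible_pc_lock_%s" % os.path.split(socket_path)
# -> '/home/user/.ansible/pc/.ansible_pc_lock_ansible-ssh-host-22-user'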
def parse_sources(self, cache=False):
    ''' iterate over inventory sources and parse each one to populate it'''

    parsed = False
    # allow for multiple inventory parsing
    for source in self._sources:

        if source:
            if ',' not in source:
                source = unfrackpath(source, follow=False)
            parse = self.parse_source(source, cache=cache)
            if parse and not parsed:
                parsed = True

    if parsed:
        # do post processing
        self._inventory.reconcile_inventory()
    else:
        if C.INVENTORY_UNPARSED_IS_FAILED:
            raise AnsibleError("No inventory was parsed, please check your configuration and options.")
        else:
            display.warning("No inventory was parsed, only implicit localhost is available")

    for group in self.groups.values():
        group.vars = combine_vars(group.vars,
                                  get_vars_from_inventory_sources(self._loader, self._sources, [group], 'inventory'))
    for host in self.hosts.values():
        host.vars = combine_vars(host.vars,
                                 get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory'))
def run(self):
    super(ConfigCLI, self).run()

    if self.options.config_file:
        self.config_file = to_bytes(unfrackpath(self.options.config_file, follow=False))
        self.config = ConfigManager(self.config_file)
    else:
        self.config = ConfigManager()
        self.config_file = to_bytes(find_ini_config_file())

    try:
        if not os.path.exists(self.config_file):
            raise AnsibleOptionsError("%s does not exist or is not accessible" % (self.config_file))
        elif not os.path.isfile(self.config_file):
            raise AnsibleOptionsError("%s is not a valid file" % (self.config_file))

        os.environ['ANSIBLE_CONFIG'] = to_native(self.config_file)
    except Exception:
        if self.action in ['view']:
            raise
        elif self.action in ['edit', 'update']:
            display.warning("File does not exist, used empty file: %s" % self.config_file)

    self.execute()
def json_to_xml(self, json_data):
    """
    The method translates JSON data encoded as per YANG model (RFC 7951)
    to XML payload
    :param json_data: JSON data that should be translated to XML
    :return: XML data in string format.
    """
    saved_arg = deepcopy(sys.argv)
    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    plugin_instance = str(uuid.uuid4())

    plugindir = unfrackpath(JSON2XML_DIR_PATH)
    makedirs_safe(plugindir)
    makedirs_safe(os.path.join(plugindir, plugin_instance))

    jtox_file_path = os.path.join(
        JSON2XML_DIR_PATH,
        plugin_instance,
        "%s.%s" % (str(uuid.uuid4()), "jtox"),
    )
    xml_file_path = os.path.join(
        JSON2XML_DIR_PATH,
        plugin_instance,
        "%s.%s" % (str(uuid.uuid4()), "xml"),
    )
    jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path))
    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    yang_metada_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "files/yang")
    yang_metadata_path = os.path.join(yang_metada_dir, "nc-op.yang")
    self._search_path += ":%s" % yang_metada_dir

    # fill in the sys args before invoking pyang
    sys.argv = ([
        self._pyang_exec_path,
        "-f",
        "jtox",
        "-o",
        jtox_file_path,
        "-p",
        self._search_path,
        "--lax-quote-checks",
    ] + self._yang_files + [yang_metadata_path])

    try:
        self._pyang_exec.run()
    except SystemExit:
        pass
    except Exception as e:
        temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
        shutil.rmtree(os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True)
        raise AnsibleError("Error while generating intermediate (jtox) file: %s" % e)
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
                shutil.rmtree(os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True)
            raise AnsibleError("Error while generating intermediate (jtox) file: %s" % err)

    json2xml_exec_path = find_file_in_path("json2xml")
    json2xml_exec = imp.load_source("json2xml", json2xml_exec_path)

    # fill in the sys args before invoking json2xml
    sys.argv = [
        json2xml_exec_path,
        "-t",
        self._doctype,
        "-o",
        xml_file_path,
        jtox_file_path,
        json_data,
    ]

    try:
        json2xml_exec.main()
        with open(xml_file_path, "r+") as fp:
            content = fp.read()
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
                shutil.rmtree(os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True)
            raise AnsibleError("Error while translating to xml: %s" % err)

    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        content = re.sub(r"<\? ?xml .*\? ?>", "", content)
        root = etree.fromstring(content)
    except Exception as e:
        raise AnsibleError("Error while reading xml document: %s" % e)
    finally:
        if not self._keep_tmp_files:
            temp_dir = os.path.join(JSON2XML_DIR_PATH, plugin_instance)
            shutil.rmtree(os.path.realpath(os.path.expanduser(temp_dir)), ignore_errors=True)
    return etree.tostring(root)
def xml_to_json(self, xml_data):
    """
    The method translates XML data to JSON data encoded as per YANG model (RFC 7951)
    :param xml_data: XML data or file path containing xml data that should be translated to JSON
    :return: data in JSON format.
    """
    plugindir = unfrackpath(XM2JSONL_DIR_PATH)
    makedirs_safe(plugindir)

    if os.path.isfile(xml_data):
        # input is xml file path
        xml_file_path = os.path.realpath(os.path.expanduser(xml_data))
    else:
        # input is xml string, copy it to file in temporary location
        xml_file_path = os.path.join(XM2JSONL_DIR_PATH, "%s.%s" % (str(uuid.uuid4()), "xml"))
        xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
        with open(xml_file_path, "w") as f:
            if not xml_data.startswith("<?xml version"):
                xml_data = '<?xml version="1.0" encoding="UTF-8"?>\n' + xml_data
            data = xml_data
            f.write(data)

    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    try:
        # validate xml
        etree.parse(xml_file_path)
        display.vvvv("Parsing xml data from temporary file: %s" % xml_file_path)
    except Exception as exc:
        if not self._keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
        raise AnsibleError("Failed to load xml data: %s" % (to_text(exc, errors="surrogate_or_strict")))

    base_pyang_path = sys.modules["pyang"].__file__
    pyang_exec_path = find_file_in_path("pyang")
    pyang_exec = imp.load_source("pyang", pyang_exec_path)

    saved_arg = deepcopy(sys.argv)
    sys.modules["pyang"].__file__ = base_pyang_path

    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    xsl_file_path = os.path.join(XM2JSONL_DIR_PATH, "%s.%s" % (str(uuid.uuid4()), "xsl"))
    json_file_path = os.path.join(XM2JSONL_DIR_PATH, "%s.%s" % (str(uuid.uuid4()), "json"))
    xls_file_path = os.path.realpath(os.path.expanduser(xsl_file_path))
    json_file_path = os.path.realpath(os.path.expanduser(json_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [
        pyang_exec_path,
        "-f",
        "jsonxsl",
        "-o",
        xls_file_path,
        "-p",
        self._search_path,
        "--lax-quote-checks",
    ] + self._yang_files
    display.display(
        "Generating xsl file '%s' by executing command '%s'" % (xls_file_path, " ".join(sys.argv)),
        log_only=True,
    )
    try:
        pyang_exec.run()
    except SystemExit:
        pass
    except Exception as e:
        if not self._keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
        raise AnsibleError("Error while generating intermediate (xsl) file: %s" % e)
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
            raise AnsibleError("Error while generating (xsl) intermediate file: %s" % err)

    xsltproc_exec_path = find_file_in_path("xsltproc")

    # fill in the sys args before invoking xsltproc
    sys.argv = [
        xsltproc_exec_path,
        "-o",
        json_file_path,
        xsl_file_path,
        xml_file_path,
    ]
    display.display(
        "Generating json data in temp file '%s' by executing command '%s'" % (json_file_path, " ".join(sys.argv)),
        log_only=True,
    )
    time.sleep(5)
    try:
        os.system(" ".join(sys.argv))
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and "error" in err.lower():
            if not self._keep_tmp_files:
                shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
            raise AnsibleError("Error while translating to json: %s" % err)

    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        display.vvvv("Reading output json data from temporary file: %s" % json_file_path)
        with open(json_file_path, "r") as fp:
            raw_content = fp.read()
            content = json.loads(raw_content)
    except Exception as e:
        raise AnsibleError("Error while reading json document %s with content %s" % (e, raw_content))
    finally:
        if not self._keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
    return content
def find_ini_config_file(warnings=None):
    ''' Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
    # FIXME: eventually deprecate ini configs

    if warnings is None:
        # Note: In this case, warnings does nothing
        warnings = set()

    # A value that can never be a valid path so that we can tell if ANSIBLE_CONFIG was set later
    # We can't use None because we could set path to None.
    SENTINEL = object

    potential_paths = []

    # Environment setting
    path_from_env = os.getenv("ANSIBLE_CONFIG", SENTINEL)
    if path_from_env is not SENTINEL:
        path_from_env = unfrackpath(path_from_env, follow=False)
        if os.path.isdir(to_bytes(path_from_env)):
            path_from_env = os.path.join(path_from_env, "ansible.cfg")
        potential_paths.append(path_from_env)

    # Current working directory
    warn_cmd_public = False
    try:
        cwd = os.getcwd()
        perms = os.stat(cwd)
        cwd_cfg = os.path.join(cwd, "ansible.cfg")
        if perms.st_mode & stat.S_IWOTH:
            # Working directory is world writable so we'll skip it.
            # Still have to look for a file here, though, so that we know if we have to warn
            if os.path.exists(cwd_cfg):
                warn_cmd_public = True
        else:
            potential_paths.append(to_text(cwd_cfg, errors='surrogate_or_strict'))
    except OSError:
        # If we can't access cwd, we'll simply skip it as a possible config source
        pass

    # Per user location
    potential_paths.append(unfrackpath("~/.ansible.cfg", follow=False))

    # System location
    potential_paths.append("/etc/ansible/ansible.cfg")

    for path in potential_paths:
        b_path = to_bytes(path)
        if os.path.exists(b_path) and os.access(b_path, os.R_OK):
            break
    else:
        path = None

    # Emit a warning if all the following are true:
    # * We did not use a config from ANSIBLE_CONFIG
    # * There's an ansible.cfg in the current working directory that we skipped
    if path_from_env != path and warn_cmd_public:
        warnings.add(u"Ansible is being run in a world writable directory (%s),"
                     u" ignoring it as an ansible.cfg source."
                     u" For more information see"
                     u" https://docs.ansible.com/ansible/devel/reference_appendices/config.html#cfg-in-world-writable-dir"
                     % to_text(cwd))

    return path
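The world-writable test above reduces to a single stat check; a standalone sketch:

import os
import stat

perms = os.stat(os.getcwd())
if perms.st_mode & stat.S_IWOTH:
    print('cwd is world writable; its ansible.cfg would be skipped')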
def resolve_path(path, basedir=None):
    ''' resolve relative or 'variable' paths '''
    if '{{CWD}}' in path:  # allow users to force CWD using 'magic' {{CWD}}
        path = path.replace('{{CWD}}', os.getcwd())

    return unfrackpath(path, follow=False, basedir=basedir)
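Usage sketch for resolve_path(): the magic '{{CWD}}' marker is replaced before normalization, so results are anchored to wherever the process was started (outputs shown are illustrative):

resolve_path('{{CWD}}/group_vars')         # -> <cwd>/group_vars
resolve_path('~/playbooks')                # -> /home/<user>/playbooks
resolve_path('vars.yml', basedir='/srv')   # -> /srv/vars.yml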
def _build_command(self, binary, *other_args):
    '''
    Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
    a command line as an array that can be passed to subprocess.Popen.
    '''

    b_command = []

    #
    # First, the command to invoke
    #

    # If we want to use password authentication, we have to set up a pipe to
    # write the password to sshpass.
    if self._play_context.password:
        if not self._sshpass_available():
            raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")

        self.sshpass_pipe = os.pipe()
        b_command += [b'sshpass', b'-d' + to_bytes(self.sshpass_pipe[0], nonstring='simplerepr', errors='surrogate_or_strict')]

    if binary == 'ssh':
        b_command += [to_bytes(self._play_context.ssh_executable, errors='surrogate_or_strict')]
    else:
        b_command += [to_bytes(binary, errors='surrogate_or_strict')]

    #
    # Next, additional arguments based on the configuration.
    #

    # sftp batch mode allows us to correctly catch failed transfers, but can
    # be disabled if the client side doesn't support the option. However,
    # sftp batch mode does not prompt for passwords so it must be disabled
    # if not using controlpersist and using sshpass
    if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
        if self._play_context.password:
            b_args = [b'-o', b'BatchMode=no']
            self._add_args(b_command, b_args, u'disable batch mode for sshpass')
        b_command += [b'-b', b'-']

    if self._play_context.verbosity > 3:
        b_command.append(b'-vvv')

    #
    # Next, we add [ssh_connection]ssh_args from ansible.cfg.
    #

    if self._play_context.ssh_args:
        b_args = [to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(self._play_context.ssh_args)]
        self._add_args(b_command, b_args, u"ansible.cfg set ssh_args")

    # Now we add various arguments controlled by configuration file settings
    # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
    # a combination thereof.

    if not C.HOST_KEY_CHECKING:
        b_args = (b"-o", b"StrictHostKeyChecking=no")
        self._add_args(b_command, b_args, u"ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled")

    if self._play_context.port is not None:
        b_args = (b"-o", b"Port=" + to_bytes(self._play_context.port, nonstring='simplerepr', errors='surrogate_or_strict'))
        self._add_args(b_command, b_args, u"ANSIBLE_REMOTE_PORT/remote_port/ansible_port set")

    key = self._play_context.private_key_file
    if key:
        b_args = (b"-o", b'IdentityFile="' + to_bytes(os.path.expanduser(key), errors='surrogate_or_strict') + b'"')
        self._add_args(b_command, b_args, u"ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set")

    if not self._play_context.password:
        self._add_args(
            b_command, (
                b"-o", b"KbdInteractiveAuthentication=no",
                b"-o", b"PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
                b"-o", b"PasswordAuthentication=no"
            ),
            u"ansible_password/ansible_ssh_pass not set"
        )

    user = self._play_context.remote_user
    if user:
        self._add_args(
            b_command,
            # the "User=" value was redacted in the source dump; the byte
            # concatenation below follows the pattern of the other options here
            (b"-o", b"User=" + to_bytes(self._play_context.remote_user, errors='surrogate_or_strict')),
            u"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set"
        )

    self._add_args(
        b_command,
        (b"-o", b"ConnectTimeout=" + to_bytes(self._play_context.timeout, errors='surrogate_or_strict', nonstring='simplerepr')),
        u"ANSIBLE_TIMEOUT/timeout set"
    )

    # Add in any common or binary-specific arguments from the PlayContext
    # (i.e. inventory or task settings or overrides on the command line).

    for opt in (u'ssh_common_args', u'{0}_extra_args'.format(binary)):
        attr = getattr(self._play_context, opt, None)
        if attr is not None:
            b_args = [to_bytes(a, errors='surrogate_or_strict') for a in self._split_ssh_args(attr)]
            self._add_args(b_command, b_args, u"PlayContext set %s" % opt)

    # Check if ControlPersist is enabled and add a ControlPath if one hasn't
    # already been set.
    controlpersist, controlpath = self._persistence_controls(b_command)

    if controlpersist:
        self._persistent = True

        if not controlpath:
            cpdir = unfrackpath(self.control_path_dir)
            b_cpdir = to_bytes(cpdir, errors='surrogate_or_strict')

            # The directory must exist and be writable.
            makedirs_safe(b_cpdir, 0o700)
            if not os.access(b_cpdir, os.W_OK):
                raise AnsibleError("Cannot write to ControlPath %s" % to_native(cpdir))

            if not self.control_path:
                self.control_path = self._create_control_path(
                    self.host,
                    self.port,
                    self.user
                )
            b_args = (b"-o", b"ControlPath=" + to_bytes(self.control_path % dict(directory=cpdir), errors='surrogate_or_strict'))
            self._add_args(b_command, b_args, u"found only ControlPersist; added ControlPath")

    # Finally, we add any caller-supplied extras.
    if other_args:
        b_command += [to_bytes(a) for a in other_args]

    return b_command
def unfrack_path(option, opt, value, parser):
    if value != '-':
        setattr(parser.values, option.dest, unfrackpath(value))
    else:
        setattr(parser.values, option.dest, value)
def resolve_path(path):
    ''' resolve relative or 'variable' paths '''
    if '{{CWD}}' in path:  # allow users to force CWD using 'magic' {{CWD}}
        path = path.replace('{{CWD}}', os.getcwd())

    return unfrackpath(path, follow=False)
def _build_command(self, binary, *other_args): """ Takes a binary (ssh, scp, sftp) and optional extra arguments and returns a command line as an array that can be passed to subprocess.Popen. """ self._command = [] ## First, the command name. # If we want to use password authentication, we have to set up a pipe to # write the password to sshpass. if self._play_context.password: if not self._sshpass_available(): raise AnsibleError( "to use the 'ssh' connection type with passwords, you must install the sshpass program" ) self.sshpass_pipe = os.pipe() self._command += ["sshpass", "-d{0}".format(self.sshpass_pipe[0])] self._command += [binary] ## Next, additional arguments based on the configuration. # sftp batch mode allows us to correctly catch failed transfers, but can # be disabled if the client side doesn't support the option. However, # sftp batch mode does not prompt for passwords so it must be disabled # if not using controlpersist and using sshpass if binary == "sftp" and C.DEFAULT_SFTP_BATCH_MODE: if self._play_context.password: self._add_args("disable batch mode for sshpass", ["-o", "BatchMode=no"]) self._command += ["-b", "-"] if self._play_context.verbosity > 3: self._command += ["-vvv"] elif binary == "ssh": # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q. self._command += ["-q"] # Next, we add [ssh_connection]ssh_args from ansible.cfg. if self._play_context.ssh_args: args = self._split_ssh_args(self._play_context.ssh_args) self._add_args("ansible.cfg set ssh_args", args) # Now we add various arguments controlled by configuration file settings # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or # a combination thereof. if not C.HOST_KEY_CHECKING: self._add_args("ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled", ("-o", "StrictHostKeyChecking=no")) if self._play_context.port is not None: self._add_args( "ANSIBLE_REMOTE_PORT/remote_port/ansible_port set", ("-o", "Port={0}".format(self._play_context.port)) ) key = self._play_context.private_key_file if key: self._add_args( "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set", ("-o", 'IdentityFile="{0}"'.format(os.path.expanduser(key))), ) if not self._play_context.password: self._add_args( "ansible_password/ansible_ssh_pass not set", ( "-o", "KbdInteractiveAuthentication=no", "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey", "-o", "PasswordAuthentication=no", ), ) user = self._play_context.remote_user if user: self._add_args( "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set", ("-o", "User={0}".format(to_bytes(self._play_context.remote_user))), ) self._add_args("ANSIBLE_TIMEOUT/timeout set", ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))) # Add in any common or binary-specific arguments from the PlayContext # (i.e. inventory or task settings or overrides on the command line). for opt in ["ssh_common_args", binary + "_extra_args"]: attr = getattr(self._play_context, opt, None) if attr is not None: args = self._split_ssh_args(attr) self._add_args("PlayContext set %s" % opt, args) # Check if ControlPersist is enabled and add a ControlPath if one hasn't # already been set. controlpersist, controlpath = self._persistence_controls(self._command) if controlpersist: self._persistent = True if not controlpath: cpdir = unfrackpath("$HOME/.ansible/cp") # The directory must exist and be writable. 
makedirs_safe(cpdir, 0o700) if not os.access(cpdir, os.W_OK): raise AnsibleError("Cannot write to ControlPath %s" % cpdir) args = ("-o", "ControlPath={0}".format(to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir)))) self._add_args("found only ControlPersist; added ControlPath", args) ## Finally, we add any caller-supplied extras. if other_args: self._command += other_args return self._command
def _build_command(self, binary, *other_args):
    '''
    Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
    a command line as an array that can be passed to subprocess.Popen.
    '''

    self._command = []

    ## First, the command name.

    # If we want to use password authentication, we have to set up a pipe to
    # write the password to sshpass.
    if self._play_context.password:
        if not self._sshpass_available():
            raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")

        self.sshpass_pipe = os.pipe()
        self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]

    self._command += [binary]

    ## Next, additional arguments based on the configuration.

    # sftp batch mode allows us to correctly catch failed transfers, but can
    # be disabled if the client side doesn't support the option.
    if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
        self._command += ['-b', '-']

    self._command += ['-C']

    if self._play_context.verbosity > 3:
        self._command += ['-vvv']
    elif binary == 'ssh':
        # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
        self._command += ['-q']

    # Next, we add [ssh_connection]ssh_args from ansible.cfg.
    if self._play_context.ssh_args:
        args = self._split_ssh_args(self._play_context.ssh_args)
        self._add_args("ansible.cfg set ssh_args", args)

    # Now we add various arguments controlled by configuration file settings
    # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
    # a combination thereof.

    # BB Mod: Rely on ssh_config's strict host key checking; IOW don't add an explicit SSH arg
    # if not C.HOST_KEY_CHECKING:
    #     self._add_args(
    #         "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
    #         ("-o", "StrictHostKeyChecking=no")
    #     )

    if self._play_context.port is not None:
        self._add_args("ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
                       ("-o", "Port={0}".format(self._play_context.port)))

    key = self._play_context.private_key_file
    if key:
        self._add_args(
            "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
            ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key))))

    # BB Mod: Stick to authmethods: hostbased,publickey
    if not self._play_context.password:
        self._add_args("ansible_password/ansible_ssh_pass not set",
                       ("-o", "KbdInteractiveAuthentication=no",
                        "-o", "PreferredAuthentications=hostbased,publickey",
                        "-o", "PasswordAuthentication=no"))

    user = self._play_context.remote_user
    if user:
        self._add_args(
            "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
            ("-o", "User={0}".format(to_bytes(self._play_context.remote_user))))

    self._add_args(
        "ANSIBLE_TIMEOUT/timeout set",
        ("-o", "ConnectTimeout={0}".format(self._play_context.timeout)))

    # Add in any common or binary-specific arguments from the PlayContext
    # (i.e. inventory or task settings or overrides on the command line).
    for opt in ['ssh_common_args', binary + '_extra_args']:
        attr = getattr(self._play_context, opt, None)
        if attr is not None:
            args = self._split_ssh_args(attr)
            self._add_args("PlayContext set %s" % opt, args)

    # Check if ControlPersist is enabled and add a ControlPath if one hasn't
    # already been set.
    controlpersist, controlpath = self._persistence_controls(self._command)

    if controlpersist:
        self._persistent = True

        if not controlpath:
            cpdir = unfrackpath('$HOME/.ansible/cp')

            # The directory must exist and be writable.
            makedirs_safe(cpdir, 0o700)
            if not os.access(cpdir, os.W_OK):
                raise AnsibleError("Cannot write to ControlPath %s" % cpdir)

            args = ("-o", "ControlPath={0}".format(
                to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))))
            self._add_args("found only ControlPersist; added ControlPath", args)

    ## Finally, we add any caller-supplied extras.
    if other_args:
        self._command += other_args

    return self._command
def _build_command(self, binary, *other_args):
    self._command = []

    self._command += [binary]
    self._command += ['-C']

    if self._play_context.verbosity > 3:
        self._command += ['-vvv']
    elif binary == 'ssh':
        # Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
        self._command += ['-q']

    # Next, we add [ssh_connection]ssh_args from ansible.cfg.
    # if self._play_context.ssh_args:
    #     args = self._split_args(self._play_context.ssh_args)
    #     self._add_args("ansible.cfg set ssh_args", args)

    # Now we add various arguments controlled by configuration file settings
    # (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
    # a combination thereof.
    if not C.HOST_KEY_CHECKING:
        self._add_args(
            "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
            ("-o", "StrictHostKeyChecking=no")
        )

    if self._play_context.port is not None:
        self._add_args(
            "ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
            ("-o", "Port={0}".format(self._play_context.port))
        )

    key = self._play_context.private_key_file
    if key:
        self._add_args(
            "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
            ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
        )

    if not self._play_context.password:
        self._add_args(
            "ansible_password/ansible_ssh_pass not set",
            (
                "-o", "KbdInteractiveAuthentication=no",
                "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
                "-o", "PasswordAuthentication=no"
            )
        )

    user = self._play_context.remote_user
    if user:
        self._add_args(
            "ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
            ("-o", "User={0}".format(to_bytes(self._play_context.remote_user)))
        )

    self._add_args(
        "ANSIBLE_TIMEOUT/timeout set",
        ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
    )

    # Check if ControlPersist is enabled and add a ControlPath if one hasn't
    # already been set.
    controlpersist, controlpath = self._persistence_controls(self._command)

    if controlpersist:
        self._persistent = True

        if not controlpath:
            cpdir = unfrackpath('$HOME/.ansible/cp')

            # The directory must exist and be writable.
            makedirs_safe(cpdir, 0o700)
            if not os.access(cpdir, os.W_OK):
                raise AnsibleError("Cannot write to ControlPath %s" % cpdir)

            args = ("-o", "ControlPath={0}".format(
                to_bytes(C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir)))
            )
            self._add_args("found only ControlPersist; added ControlPath", args)

    ## Finally, we add any caller-supplied extras.
    if other_args:
        self._command += other_args

    return self._command
def unfrack_path(option, opt, value, parser): """Turn an Option's data into a single path in Ansible locations""" if value != '-': setattr(parser.values, option.dest, unfrackpath(value)) else: setattr(parser.values, option.dest, value)
def run(self, terms, variables, **kwargs):
    res = []
    try:
        xml_data = terms[0]
    except IndexError:
        raise AnsibleError("Either xml string or path to xml file must be specified")

    try:
        yang_file = kwargs['yang_file']
    except KeyError:
        raise AnsibleError("value of 'yang_file' must be specified")

    yang_file = os.path.realpath(os.path.expanduser(yang_file))
    if not os.path.isfile(yang_file):
        # Maybe we are passing a glob?
        yang_files = glob.glob(yang_file)
        if not yang_files:
            # Glob returned no files
            raise AnsibleError('%s invalid file path' % yang_file)
    else:
        yang_files = [yang_file]

    search_path = kwargs.pop('search_path', '')
    keep_tmp_files = kwargs.pop('keep_tmp_files', False)

    abs_search_path = None
    for path in search_path.split(':'):
        path = os.path.realpath(os.path.expanduser(path))
        if abs_search_path is None:
            abs_search_path = path
        else:
            abs_search_path += ':' + path
        if path != '' and not os.path.isdir(path):
            raise AnsibleError('%s is invalid directory path' % path)

    search_path = abs_search_path

    plugindir = unfrackpath(XM2JSONL_DIR_PATH)
    makedirs_safe(plugindir)

    if os.path.isfile(xml_data):
        # input is xml file path
        xml_file_path = xml_data
    else:
        # input is xml string, copy it to file in temporary location
        xml_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml'))
        xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))
        with open(xml_file_path, 'w') as f:
            if not xml_data.startswith('<?xml version'):
                xml_data = '<?xml version="1.0" encoding="UTF-8"?>\n' + xml_data
            f.write(xml_data)

    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    try:
        # validate xml
        etree.parse(xml_file_path)
        display.vvvv("Parsing xml data from temporary file: %s" % xml_file_path)
    except Exception as exc:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
        raise AnsibleError("Failed to load xml data: %s" % (to_text(exc, errors='surrogate_or_strict')))

    base_pyang_path = sys.modules['pyang'].__file__
    pyang_exec_path = find_file_in_path('pyang')
    pyang_exec = imp.load_source('pyang', pyang_exec_path)

    saved_arg = deepcopy(sys.argv)
    sys.modules['pyang'].__file__ = base_pyang_path

    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    xsl_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xsl'))
    json_file_path = os.path.join(XM2JSONL_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'json'))
    xsl_file_path = os.path.realpath(os.path.expanduser(xsl_file_path))
    json_file_path = os.path.realpath(os.path.expanduser(json_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [pyang_exec_path, '-f', 'jsonxsl', '-o', xsl_file_path, '-p', search_path, "--lax-quote-checks"] + yang_files

    display.display("Generating xsl file '%s' by executing command '%s'" % (xsl_file_path, ' '.join(sys.argv)), log_only=True)
    try:
        pyang_exec.run()
    except SystemExit:
        pass
    except Exception as e:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
        raise AnsibleError('Error while generating intermediate (xsl) file: %s' % e)
    finally:
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
            raise AnsibleError('Error while generating intermediate (xsl) file: %s' % err)

    xsltproc_exec_path = find_file_in_path('xsltproc')

    # fill in the sys args before invoking xsltproc
    sys.argv = [xsltproc_exec_path, '-o', json_file_path, xsl_file_path, xml_file_path]

    display.display("Generating json data in temp file '%s' by executing command '%s'" % (json_file_path, ' '.join(sys.argv)), log_only=True)
    time.sleep(5)
    try:
        os.system(' '.join(sys.argv))
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)
            raise AnsibleError('Error while translating to json: %s' % err)

    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        display.vvvv("Reading output json data from temporary file: %s" % json_file_path)
        with open(json_file_path) as fp:
            content = json.load(fp)
    except Exception as e:
        raise AnsibleError('Error while reading json document: %s' % e)
    finally:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(XM2JSONL_DIR_PATH)), ignore_errors=True)

    res.append(content)
    return res
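# Hedged side note, not part of the plugin: building the command with
# ' '.join(sys.argv) and os.system breaks on paths containing spaces and
# silently ignores the exit code. A subprocess-based sketch of the same
# invocation (paths here are placeholders, not from the source):
import subprocess

rc = subprocess.call([
    'xsltproc',                 # resolved via find_file_in_path above
    '-o', '/tmp/out.json',      # placeholder output path
    '/tmp/transform.xsl',       # placeholder stylesheet
    '/tmp/input.xml',           # placeholder input document
])
if rc != 0:
    raise RuntimeError('xsltproc failed with exit code %d' % rc)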
def _load_role_path(self, role_name):
    '''
    the 'role', as specified in the ds (or as a bare string), can either
    be a simple name or a full path. If it is a full path, we use the
    basename as the role name, otherwise we take the name as-given and
    append it to the default role path
    '''

    # create a templar class to template the dependency names, in
    # case they contain variables
    if self._variable_manager is not None:
        all_vars = self._variable_manager.get_vars(play=self._play)
    else:
        all_vars = {}

    templar = Templar(loader=self._loader, variables=all_vars)
    role_name = templar.template(role_name)

    role_tuple = None

    # try to load as a collection-based role first
    if self._collection_list or AnsibleCollectionRef.is_valid_fqcr(role_name):
        role_tuple = _get_collection_role_path(role_name, self._collection_list)

    if role_tuple:
        # we found it, stash collection data and return the name/path tuple
        self._role_collection = role_tuple[2]
        return role_tuple[0:2]

    # We didn't find a collection role, look in defined role paths
    # FUTURE: refactor this to be callable from internal so we can properly order
    # ansible.legacy searches with the collections keyword

    # we always start the search for roles in the base directory of the playbook
    role_search_paths = [
        os.path.join(self._loader.get_basedir(), u'roles'),
    ]

    # also search in the configured roles path
    if C.DEFAULT_ROLES_PATH:
        role_search_paths.extend(C.DEFAULT_ROLES_PATH)

    # next, append the roles basedir, if it was set, so we can
    # search relative to that directory for dependent roles
    if self._role_basedir:
        role_search_paths.append(self._role_basedir)

    # finally as a last resort we look in the current basedir as set
    # in the loader (which should be the playbook dir itself) but without
    # the roles/ dir appended
    role_search_paths.append(self._loader.get_basedir())

    # now iterate through the possible paths and return the first one we find
    for path in role_search_paths:
        path = templar.template(path)
        role_path = unfrackpath(os.path.join(path, role_name))
        if self._loader.path_exists(role_path):
            return (role_name, role_path)

    # if not found elsewhere try to extract path from name
    role_path = unfrackpath(role_name)
    if self._loader.path_exists(role_path):
        role_name = os.path.basename(role_name)
        return (role_name, role_path)

    searches = (self._collection_list or []) + role_search_paths
    raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(searches)), obj=self._ds)
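# Stdlib-only sketch of the search-order fallback above; the role name and
# search paths are made-up placeholders, and os.path.normpath/expanduser
# stand in for Ansible's unfrackpath.
import os

role_name = 'common'
role_search_paths = ['/playbook/roles', '/etc/ansible/roles', '/playbook']
for path in role_search_paths:
    candidate = os.path.normpath(os.path.join(os.path.expanduser(path), role_name))
    if os.path.exists(candidate):
        break  # first match wins, mirroring the loop above
else:
    raise LookupError("the role '%s' was not found in %s" % (role_name, ':'.join(role_search_paths)))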
def unfrack_path(option, opt, value, parser):
    setattr(parser.values, option.dest, unfrackpath(value))
def run(self, terms, variables, **kwargs):
    res = []
    try:
        json_config = terms[0]
    except IndexError:
        raise AnsibleError("path to json file must be specified")

    try:
        yang_file = kwargs['yang_file']
    except KeyError:
        raise AnsibleError("value of 'yang_file' must be specified")

    yang_file = os.path.realpath(os.path.expanduser(yang_file))
    if not os.path.isfile(yang_file):
        # Maybe we are passing a glob?
        yang_files = glob.glob(yang_file)
        if not yang_files:
            # Glob returned no files
            raise AnsibleError('%s invalid file path' % yang_file)
    else:
        yang_files = [yang_file]

    search_path = kwargs.pop('search_path', '')
    keep_tmp_files = kwargs.pop('keep_tmp_files', False)

    abs_search_path = None
    for path in search_path.split(':'):
        path = os.path.realpath(os.path.expanduser(path))
        if abs_search_path is None:
            abs_search_path = path
        else:
            abs_search_path += ':' + path
        if path != '' and not os.path.isdir(path):
            raise AnsibleError('%s is invalid directory path' % path)

    search_path = abs_search_path

    json_config = os.path.realpath(os.path.expanduser(json_config))
    try:
        # validate json
        with open(json_config) as fp:
            json.load(fp)
    except Exception as exc:
        raise AnsibleError("Failed to load json configuration: %s" % (to_text(exc, errors='surrogate_or_strict')))

    root_node = kwargs.get('root', 'config')

    base_pyang_path = sys.modules['pyang'].__file__
    pyang_exec_path = find_file_in_path('pyang')
    pyang_exec = imp.load_source('pyang', pyang_exec_path)

    saved_arg = deepcopy(sys.argv)
    sys.modules['pyang'].__file__ = base_pyang_path

    saved_stdout = sys.stdout
    saved_stderr = sys.stderr
    sys.stdout = sys.stderr = StringIO()

    plugindir = unfrackpath(JSON2XML_DIR_PATH)
    makedirs_safe(plugindir)

    jtox_file_path = os.path.join(JSON2XML_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'jtox'))
    xml_file_path = os.path.join(JSON2XML_DIR_PATH, '%s.%s' % (str(uuid.uuid4()), 'xml'))
    jtox_file_path = os.path.realpath(os.path.expanduser(jtox_file_path))
    xml_file_path = os.path.realpath(os.path.expanduser(xml_file_path))

    # fill in the sys args before invoking pyang
    sys.argv = [pyang_exec_path, '-f', 'jtox', '-o', jtox_file_path, '-p', search_path, "--lax-quote-checks"] + yang_files

    try:
        pyang_exec.run()
    except SystemExit:
        pass
    except Exception as e:
        shutil.rmtree(os.path.realpath(os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
        raise AnsibleError('Error while generating intermediate (jtox) file: %s' % e)
    finally:
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
            raise AnsibleError('Error while generating intermediate (jtox) file: %s' % err)

    json2xml_exec_path = find_file_in_path('json2xml')
    json2xml_exec = imp.load_source('json2xml', json2xml_exec_path)

    # fill in the sys args before invoking json2xml
    sys.argv = [json2xml_exec_path, '-t', root_node, '-o', xml_file_path, jtox_file_path, json_config]

    try:
        json2xml_exec.main()
        with open(xml_file_path, 'r+') as fp:
            content = fp.read()
    except SystemExit:
        pass
    finally:
        err = sys.stderr.getvalue()
        if err and 'error' in err.lower():
            if not keep_tmp_files:
                shutil.rmtree(os.path.realpath(os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)
            raise AnsibleError('Error while translating to xml: %s' % err)

    sys.argv = saved_arg
    sys.stdout = saved_stdout
    sys.stderr = saved_stderr

    try:
        content = re.sub(r'<\? ?xml .*\? ?>', '', content)
        root = etree.fromstring(content)
    except Exception as e:
        raise AnsibleError('Error while reading xml document: %s' % e)
    finally:
        if not keep_tmp_files:
            shutil.rmtree(os.path.realpath(os.path.expanduser(JSON2XML_DIR_PATH)), ignore_errors=True)

    res.append(etree.tostring(root))
    return res
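# Illustrative round trip of the final step above, using the stdlib
# ElementTree in place of lxml.etree; the XML content is a made-up sample.
import re
import xml.etree.ElementTree as ET

content = '<?xml version="1.0"?><config><hostname>r1</hostname></config>'
content = re.sub(r'<\? ?xml .*\? ?>', '', content)  # strip the XML declaration, as the plugin does
root = ET.fromstring(content)
print(ET.tostring(root))                            # b'<config><hostname>r1</hostname></config>'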
def _connect(self):
    ''' connect to the remote host '''

    self._display.vvv("ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)

    if self._connected:
        return self

    # We start with ansible_ssh_args from the inventory if it's set,
    # or [ssh_connection]ssh_args from ansible.cfg, or the default
    # Control* settings.
    if self.ssh_args:
        args = self._split_args(self.ssh_args)
        self.add_args("inventory set ansible_ssh_args", args)
    elif C.ANSIBLE_SSH_ARGS:
        args = self._split_args(C.ANSIBLE_SSH_ARGS)
        self.add_args("ansible.cfg set ssh_args", args)
    else:
        args = (
            "-o", "ControlMaster=auto",
            "-o", "ControlPersist=60s"
        )
        self.add_args("default arguments", args)

    # If any of the above have set ControlPersist but not a
    # ControlPath, add one ourselves.
    cp_in_use = False
    cp_path_set = False
    for arg in self._common_args:
        if "ControlPersist" in arg:
            cp_in_use = True
        if "ControlPath" in arg:
            cp_path_set = True

    if cp_in_use and not cp_path_set:
        self._cp_dir = unfrackpath('$HOME/.ansible/cp')

        args = ("-o", "ControlPath=\"{0}\"".format(
            C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=self._cp_dir))
        )
        self.add_args("found only ControlPersist; added ControlPath", args)

        # The directory must exist and be writable.
        makedirs_safe(self._cp_dir, 0o700)
        if not os.access(self._cp_dir, os.W_OK):
            raise AnsibleError("Cannot write to ControlPath %s" % self._cp_dir)

    if not C.HOST_KEY_CHECKING:
        self.add_args(
            "ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
            ("-o", "StrictHostKeyChecking=no")
        )

    if self._play_context.port is not None:
        self.add_args(
            "ANSIBLE_REMOTE_PORT/remote_port/ansible_ssh_port set",
            ("-o", "Port={0}".format(self._play_context.port))
        )

    key = self._play_context.private_key_file
    if key:
        self.add_args(
            "ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
            ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
        )

    if not self._play_context.password:
        self.add_args(
            "ansible_password/ansible_ssh_pass not set",
            (
                "-o", "KbdInteractiveAuthentication=no",
                "-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
                "-o", "PasswordAuthentication=no"
            )
        )

    user = self._play_context.remote_user
    if user and user != pwd.getpwuid(os.geteuid())[0]:
        self.add_args(
            "ANSIBLE_REMOTE_USER/remote_user/ansible_ssh_user/user/-u set",
            ("-o", "User={0}".format(self._play_context.remote_user))
        )

    self.add_args(
        "ANSIBLE_TIMEOUT/timeout set",
        ("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
    )

    # If any extra SSH arguments are specified in the inventory for
    # this host, or specified as an override on the command line,
    # add them in.
    if self._play_context.ssh_extra_args:
        args = self._split_args(self._play_context.ssh_extra_args)
        self.add_args("command-line added --ssh-extra-args", args)
    elif self.ssh_extra_args:
        args = self._split_args(self.ssh_extra_args)
        self.add_args("inventory added ansible_ssh_extra_args", args)

    self._connected = True
    return self
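# Assumed-value sketch of how the ControlPath argument above is assembled;
# the template string mirrors what Ansible's default ANSIBLE_SSH_CONTROL_PATH
# has historically looked like, but it is hard-coded here for illustration.
import os

cp_dir = os.path.expanduser('~/.ansible/cp')
control_path_template = '%(directory)s/ansible-ssh-%%h-%%p-%%r'
args = ('-o', 'ControlPath="{0}"'.format(control_path_template % dict(directory=cp_dir)))
# args is now e.g. ('-o', 'ControlPath="/home/user/.ansible/cp/ansible-ssh-%h-%p-%r"'),
# leaving the %h/%p/%r tokens for ssh itself to expand per host/port/user.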
def parse(self):
    """Parse the command line args

    This method parses the command line arguments. It uses the parser
    stored in the self.parser attribute and saves the args and options in
    self.args and self.options respectively.

    Subclasses need to implement this method. They will usually create
    a base_parser, add their own options to the base_parser, and then call
    this method to do the actual parsing. An implementation will look
    something like this::

        def parse(self):
            parser = super(MyCLI, self).base_parser(usage="My Ansible CLI", inventory_opts=True)
            parser.add_option('--my-option', dest='my_option', action='store')
            self.parser = parser
            super(MyCLI, self).parse()
            # If some additional transformations are needed for the
            # arguments and options, do it here.
    """
    self.options, self.args = self.parser.parse_args(self.args[1:])

    # process tags
    if hasattr(self.options, 'tags') and not self.options.tags:
        # optparse defaults do not do what's expected
        self.options.tags = ['all']
    if hasattr(self.options, 'tags') and self.options.tags:
        if not C.MERGE_MULTIPLE_CLI_TAGS:
            if len(self.options.tags) > 1:
                display.deprecated('Specifying --tags multiple times on the command line currently uses the last specified value. '
                                   'In 2.4, values will be merged instead. Set merge_multiple_cli_tags=True in ansible.cfg to get this behavior now.',
                                   version=2.5, removed=False)
                self.options.tags = [self.options.tags[-1]]

        tags = set()
        for tag_set in self.options.tags:
            for tag in tag_set.split(u','):
                tags.add(tag.strip())
        self.options.tags = list(tags)

    # process skip_tags
    if hasattr(self.options, 'skip_tags') and self.options.skip_tags:
        if not C.MERGE_MULTIPLE_CLI_TAGS:
            if len(self.options.skip_tags) > 1:
                display.deprecated('Specifying --skip-tags multiple times on the command line currently uses the last specified value. '
                                   'In 2.4, values will be merged instead. Set merge_multiple_cli_tags=True in ansible.cfg to get this behavior now.',
                                   version=2.5, removed=False)
                self.options.skip_tags = [self.options.skip_tags[-1]]

        skip_tags = set()
        for tag_set in self.options.skip_tags:
            for tag in tag_set.split(u','):
                skip_tags.add(tag.strip())
        self.options.skip_tags = list(skip_tags)

    # process inventory options except for CLIs that require their own processing
    if hasattr(self.options, 'inventory') and not self.SKIP_INVENTORY_DEFAULTS:
        if self.options.inventory:
            # should always be list
            if isinstance(self.options.inventory, string_types):
                self.options.inventory = [self.options.inventory]

            # Ensure full paths when needed
            self.options.inventory = [unfrackpath(opt, follow=False) if ',' not in opt else opt for opt in self.options.inventory]
        else:
            self.options.inventory = C.DEFAULT_HOST_LIST
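# Minimal sketch of the inventory normalization above: comma-separated
# inline host lists pass through untouched, while file paths are expanded
# to absolute form (os.path stands in for unfrackpath here).
import os

def _normalize(opt):
    if ',' in opt:  # inline host list such as 'web1,web2,'
        return opt
    return os.path.abspath(os.path.expanduser(opt))

inventory = [_normalize(o) for o in ['~/hosts.ini', 'web1,web2,']]
# -> ['/home/user/hosts.ini', 'web1,web2,']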