def parse(self, inventory, loader, path, cache=False):
    """Run the nmap CLI against the configured address and populate inventory.

    Each discovered host is added with an 'ip' variable and, when port
    scanning is enabled, a 'ports' list of dicts (port/protocol/state/service).

    :param inventory: inventory object to populate
    :param loader: data loader (passed through to the base class)
    :param path: path to the inventory source / plugin config file
    :param cache: passed through to the base parse(); not used locally
    :raises AnsibleParserError: if nmap is missing, fails, or output is unparsable
    """
    # fail early if the nmap binary is not on PATH
    try:
        self._nmap = get_bin_path('nmap')
    except ValueError as e:
        raise AnsibleParserError(
            'nmap inventory plugin requires the nmap cli tool to work: {0}'.format(to_native(e)))

    super(InventoryModule, self).parse(inventory, loader, path, cache=cache)

    self._read_config_data(path)

    # setup command
    cmd = [self._nmap]
    if not self._options['ports']:
        # no port scan requested: ping-scan only
        cmd.append('-sP')

    if self._options['ipv4'] and not self._options['ipv6']:
        cmd.append('-4')
    elif self._options['ipv6'] and not self._options['ipv4']:
        cmd.append('-6')
    elif not self._options['ipv6'] and not self._options['ipv4']:
        raise AnsibleParserError('One of ipv4 or ipv6 must be enabled for this plugin')

    if self._options['exclude']:
        cmd.append('--exclude')
        cmd.append(','.join(self._options['exclude']))

    cmd.append(self._options['address'])
    try:
        # execute nmap and capture both streams
        p = Popen(cmd, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            raise AnsibleParserError('Failed to run nmap, rc=%s: %s' % (p.returncode, to_native(stderr)))

        # parse results; state carried across lines: current host, its ip, accumulated ports
        host = None
        ip = None
        ports = []

        try:
            t_stdout = to_text(stdout, errors='surrogate_or_strict')
        except UnicodeError as e:
            raise AnsibleParserError('Invalid (non unicode) input returned: %s' % to_native(e))

        for line in t_stdout.splitlines():
            hits = self.find_host.match(line)
            if hits:
                # new host line: flush the previous host's accumulated ports first
                if host is not None:
                    self.inventory.set_variable(host, 'ports', ports)

                # if dns only shows arpa, just use ip instead as hostname
                # NOTE(review): only checks '.in-addr.arpa' (IPv4 reverse zone);
                # IPv6 '.ip6.arpa' names would be kept as hostnames — confirm intended
                if hits.group(1).endswith('.in-addr.arpa'):
                    host = hits.group(2)
                else:
                    host = hits.group(1)

                # prefer group(2) as the IP when a reverse-DNS name matched;
                # otherwise group(1) already holds the bare IP
                if hits.group(2) is not None:
                    ip = hits.group(2)
                else:
                    ip = hits.group(1)

                if host is not None:
                    # update inventory and reset the per-host port accumulator
                    self.inventory.add_host(host)
                    self.inventory.set_variable(host, 'ip', ip)
                    ports = []
                continue

        host_ports = self.find_port.match(line)
        if host is not None and host_ports:
            ports.append({'port': host_ports.group(1),
                          'protocol': host_ports.group(2),
                          'state': host_ports.group(3),
                          'service': host_ports.group(4)})
            continue

        # TODO: parse more data, OS?

        # if any leftovers: flush the final host's ports after the loop
        if host and ports:
            self.inventory.set_variable(host, 'ports', ports)

    except Exception as e:
        raise AnsibleParserError("failed to parse %s: %s " % (to_native(path), to_native(e)))
def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.

    Normalizes the many legacy task forms (action:, local_action:,
    module: args, with_* loops, top-level include vars) into a single
    clean AnsibleMapping suitable for the Task class attributes.
    '''

    # NOTE(review): assert is stripped under `python -O`; consider raising instead
    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        # preserve the YAML source position for error reporting
        new_ds.ansible_pos = ds.ansible_pos

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        # re-raise with the task datastructure attached for position info
        raise AnsibleParserError(to_native(e), obj=ds)

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in ('command', 'shell', 'script'):
        if 'cmd' in args:
            if args.get('_raw_params', '') != '':
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args['_raw_params'] = args.pop('cmd')

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.get('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in iteritems(ds):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue
        elif k.replace("with_", "") in lookup_loader:
            # legacy with_<lookup> loop keyword
            self._preprocess_loop(ds, new_ds, k, v)
        else:
            # pre-2.0 syntax allowed variables for include statements at the
            # top level of the task, so we move those into the 'vars' dictionary
            # here, and show a deprecation message as we will remove this at
            # some point in the future.
            if action == 'include' and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                                   " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                                   " for currently supported syntax regarding included files and variables")
                new_ds['vars'][k] = v
            else:
                new_ds[k] = v

    return super(Task, self).preprocess_data(new_ds)
def _get_file_contents(self, file_name):
    """Fetch an in-memory mapped file's contents.

    Returns a ``(bytes_contents, False)`` tuple (the False mirrors the real
    loader's 'show_content' flag position) or raises AnsibleParserError when
    the name is not in the mapping.
    """
    key = to_text(file_name)
    try:
        contents = self._file_mapping[key]
    except KeyError:
        raise AnsibleParserError("file not found: %s" % key)
    return (to_bytes(contents), False)
def parse(self):
    '''
    Given a task in one of the supported forms, parses and returns
    the action, arguments, and delegate_to values for the task, dealing
    with all sorts of levels of fuzziness.

    :returns: (action, args, delegate_to) tuple
    :raises AnsibleParserError: on conflicting/missing actions or bad raw params
    '''

    thing = None

    action = None
    delegate_to = self._task_ds.get('delegate_to', None)
    args = dict()

    # This is the standard YAML form for command-type modules. We grab
    # the args and pass them in as additional arguments, which can/will
    # be overwritten via dict updates from the other arg sources below
    additional_args = self._task_ds.get('args', dict())

    # We can have one of action, local_action, or module specified
    # action
    if 'action' in self._task_ds:
        # an old school 'action' statement
        thing = self._task_ds['action']
        action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

    # local_action
    if 'local_action' in self._task_ds:
        # local_action is similar but also implies a delegate_to
        if action is not None:
            raise AnsibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
        thing = self._task_ds.get('local_action', '')
        delegate_to = 'localhost'
        action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

    # module: <stuff> is the more new-style invocation

    # walk the input dictionary to see we recognize a module name
    for (item, value) in iteritems(self._task_ds):
        if item in module_loader or item in ['meta', 'include', 'include_role']:
            # finding more than one module name is a problem
            if action is not None:
                raise AnsibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)
            action = item
            thing = value
            action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

    # if we didn't see any module in the task at all, it's not a task really
    if action is None:
        # 'ping' missing from the loader implies a broken/partial install
        if 'ping' not in module_loader:
            raise AnsibleParserError("The requested action was not found in configured module paths. "
                                     "Additionally, core modules are missing. If this is a checkout, "
                                     "run 'git pull --rebase' to correct this problem.",
                                     obj=self._task_ds)
        else:
            raise AnsibleParserError("no action detected in task. This often indicates a misspelled module name, or incorrect module path.",
                                     obj=self._task_ds)
    elif args.get('_raw_params', '') != '' and action not in RAW_PARAM_MODULES:
        # raw params on a non-raw module are only tolerated when they are
        # templated variables (resolved later); otherwise it is an error
        templar = Templar(loader=None)
        raw_params = args.pop('_raw_params')
        if templar._contains_vars(raw_params):
            args['_variable_params'] = raw_params
        else:
            raise AnsibleParserError("this task '%s' has extra params, which is only allowed in the following modules: %s"
                                     % (action, ", ".join(RAW_PARAM_MODULES)), obj=self._task_ds)

    # shell modules require special handling
    (action, args) = self._handle_shell_weirdness(action, args)

    return (action, args, delegate_to)
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML), return a list of
    Task() or TaskInclude() objects.
    '''

    # local imports prevent a circular dependency between playbook modules
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    if not isinstance(ds, list):
        raise AnsibleParserError('task has bad type: "%s". Expected "list"' % type(ds).__name__, obj=ds)

    loaded_tasks = []
    for entry in ds:
        if not isinstance(entry, dict):
            raise AnsibleParserError('task/handler has bad type: "%s". Expected "dict"' % type(entry).__name__, obj=entry)

        if 'block' in entry:
            # nested block: delegate the whole subtree to Block.load
            loaded = Block.load(entry, play=play, parent_block=block, role=role,
                                task_include=task_include, use_handlers=use_handlers,
                                variable_manager=variable_manager, loader=loader)
        elif use_handlers:
            loaded = Handler.load(entry, block=block, role=role, task_include=task_include,
                                  variable_manager=variable_manager, loader=loader)
        else:
            loaded = Task.load(entry, block=block, role=role, task_include=task_include,
                               variable_manager=variable_manager, loader=loader)

        loaded_tasks.append(loaded)

    return loaded_tasks
def parse(self, inventory, loader, path, cache=True):
    """Build inventory from a ServiceNow table.

    Dispatches to one of three mutually exclusive grouping strategies:
    explicit 'named_groups', automatic 'group_by', or the constructed
    (compose/groups/keyed_groups) path, optionally enhanced with
    relationship records.

    :raises AnsibleParserError: on conflicting options or client errors
    """
    super(InventoryModule, self).parse(inventory, loader, path)

    self._read_config_data(path)
    self._warn_about_deprecations()

    named_groups = self.get_option("named_groups")
    group_by = self.get_option("group_by")
    # the two grouping strategies cannot be combined
    if named_groups and group_by:
        raise AnsibleParserError(
            "Invalid configuration: 'named_groups' and 'group_by' are mutually "
            "exclusive."
        )
    self.validate_grouping_conditions(named_groups, group_by)

    try:
        client = Client(**self._get_instance())
    except ServiceNowError as e:
        raise AnsibleParserError(e)

    enhanced = self.get_option("enhanced")
    # 'enhanced' only applies to the constructed path below
    if enhanced and (named_groups or group_by):
        raise AnsibleParserError(
            "Option 'enhanced' is incompatible with options 'named_groups' or "
            "'group_by'."
        )

    table_client = TableClient(client)

    table = self.get_option("table")
    host_source = self.get_option("ansible_host_source")
    name_source = self.get_option("inventory_hostname_source")
    columns = self.get_option("columns")

    if named_groups:
        # Creates exactly the specified groups (which might be empty).
        # Leaves nothing ungrouped.
        self.fill_desired_groups(table_client, table, host_source, name_source, columns, named_groups)
        return

    if group_by:
        self.fill_auto_groups(table_client, table, host_source, name_source, columns, group_by)
        return

    query = self.get_option("query")
    # TODO: Insert caching here once we remove deprecated functionality
    records = fetch_records(table_client, table, query)

    if enhanced:
        # pull relationship records and attach them as extra groups
        rel_records = fetch_records(table_client, REL_TABLE, REL_QUERY, fields=REL_FIELDS)
        enhance_records_with_rel_groups(records, rel_records)

    self.fill_constructed(
        records,
        columns,
        host_source,
        name_source,
        self.get_option("compose"),
        self.get_option("groups"),
        self.get_option("keyed_groups"),
        self.get_option("strict"),
        enhanced,
    )
def parse(self, inventory, loader, path, cache=True):
    """Populate inventory with GCE instances across configured projects/zones.

    Uses (and refreshes) the plugin cache when enabled; on a cache miss the
    API is queried per project/zone and results are stored for next run.

    :raises AnsibleParserError: when the google-auth library is missing
    """
    if not HAS_GOOGLE_LIBRARIES:
        raise AnsibleParserError('gce inventory plugin cannot start: %s' % missing_required_lib('google-auth'))

    super(InventoryModule, self).parse(inventory, loader, path)

    config_data = {}
    config_data = self._read_config_data(path)

    if self.get_option('use_contrib_script_compatible_sanitization'):
        # match group names produced by the old contrib gce.py script
        self._sanitize_group_name = self._legacy_script_compatible_group_sanitization

    # setup parameters as expected by 'fake module class' to reuse module_utils w/o changing the API
    params = {
        'filters': self.get_option('filters'),
        'projects': self.get_option('projects'),
        'scopes': self.get_option('scopes'),
        'zones': self.get_option('zones'),
        'auth_kind': self.get_option('auth_kind'),
        'service_account_file': self.get_option('service_account_file'),
        'service_account_email': self.get_option('service_account_email'),
    }

    self.fake_module = GcpMockModule(params)
    self.auth_session = GcpSession(self.fake_module, 'compute')

    query = self._get_query_options(params['filters'])

    if self.get_option('retrieve_image_info'):
        project_disks = self._get_project_disks(config_data, query)
    else:
        project_disks = None

    # Cache logic: the 'cache' parameter gates whether caching may be used at
    # all; the plugin's own 'cache' option then decides if it actually is
    if cache:
        cache = self.get_option('cache')
        cache_key = self.get_cache_key(path)
    else:
        cache_key = None

    cache_needs_update = False
    if cache:
        try:
            results = self._cache[cache_key]
            for project in results:
                for zone in results[project]:
                    self._add_hosts(results[project][zone], config_data, False, project_disks=project_disks)
        except KeyError:
            # cache miss: fall through to a live fetch below
            cache_needs_update = True

    if not cache or cache_needs_update:
        cached_data = {}
        for project in params['projects']:
            cached_data[project] = {}
            params['project'] = project
            # zones resolved per project when not explicitly configured
            if not params['zones']:
                zones = self._get_zones(project, params)
            else:
                zones = params['zones']
            for zone in zones:
                link = self._instances % (project, zone)
                params['zone'] = zone
                resp = self.fetch_list(params, link, query)
                self._add_hosts(resp.get('items'), config_data, project_disks=project_disks)
                cached_data[project][zone] = resp.get('items')

    if cache_needs_update:
        self._cache[cache_key] = cached_data
def _get_file_contents(self, path):
    """Look up an in-memory mapped file by path.

    Returns ``(bytes_contents, False)``; raises AnsibleParserError if the
    path is not present in the mapping.
    """
    text_path = to_text(path)
    # guard clause: unmapped paths are an error
    if text_path not in self._file_mapping:
        raise AnsibleParserError("file not found: %s" % text_path)
    return (to_bytes(self._file_mapping[text_path]), False)
def parse(self, inventory, loader, path, cache=True):
    """Populate inventory from a VIRL simulation's REST interface.

    Connection settings come from environment variables (VIRL_HOST,
    VIRL_USERNAME, VIRL_PASSWORD, VIRL_LAB) with the plugin's configured
    options as fallback. All discovered nodes land in the 'virl_hosts' group
    with 'ansible_host' set to the management IP.

    :raises AnsibleParserError: when the 'virl_hosts' group cannot be added
    """
    # call base method to ensure properties are available for use with other helper methods
    super(InventoryModule, self).parse(inventory, loader, path, cache)

    # this method will parse 'common format' inventory sources and
    # update any options declared in DOCUMENTATION as needed
    # config = self._read_config_data(self, path)
    self._read_config_data(path)

    # if NOT using _read_config_data you should call set_options directly,
    # to process any defined configuration for this plugin,
    # if you dont define any options you can skip
    # self.set_options()

    # self._read_virlrc()
    # self._get_simulation()

    # environment variables take precedence over plugin options
    if 'VIRL_HOST' in os.environ and len(os.environ['VIRL_HOST']):
        self.virl_host = os.environ['VIRL_HOST']
    else:
        self.virl_host = self.get_option('host')
    self.display.vvv("virl.py - VIRL_HOST: {0}".format(self.virl_host))

    if 'VIRL_USERNAME' in os.environ and len(os.environ['VIRL_USERNAME']):
        self.virl_username = os.environ['VIRL_USERNAME']
    else:
        self.virl_username = self.get_option('username')
    self.display.vvv("virl.py - VIRL_USERNAME: {0}".format(self.virl_username))

    if 'VIRL_PASSWORD' in os.environ and len(os.environ['VIRL_PASSWORD']):
        self.virl_password = os.environ['VIRL_PASSWORD']
    else:
        self.virl_password = self.get_option('password')

    if 'VIRL_LAB' in os.environ and len(os.environ['VIRL_LAB']):
        self.virl_lab = os.environ['VIRL_LAB']
    else:
        self.virl_lab = self.get_option('lab')
    self.display.vvv("virl.py - VIRL_LAB: {0}".format(self.virl_lab))

    # expose connection details as 'all' group vars
    # NOTE(review): self.simulation is assumed to be set elsewhere
    # (see commented-out self._get_simulation() above) — confirm
    self.inventory.set_variable('all', 'virl_host', self.virl_host)
    self.inventory.set_variable('all', 'virl_username', self.virl_username)
    self.inventory.set_variable('all', 'virl_password', self.virl_password)
    self.inventory.set_variable('all', 'virl_session', self.simulation)
    self.inventory.set_variable('all', 'virl_simulation', self.simulation)

    url = "http://%s:19399/simengine/rest/interfaces/%s" % (self.virl_host, self.simulation)

    # perform REST operation
    simulations = requests.get(url, auth=(self.virl_username, self.virl_password))
    if simulations.status_code == 200:
        interfaces = simulations.json()[self.simulation]
        try:
            group = self.inventory.add_group('virl_hosts')
        except AnsibleError as e:
            # BUGFIX: previously formatted with `group`, which is unbound when
            # add_group() itself raised (NameError would mask the real error)
            raise AnsibleParserError("Unable to add group %s: %s" % ('virl_hosts', to_text(e)))
        for key, value in interfaces.items():
            self.inventory.add_host(key, group='virl_hosts')
            self.inventory.set_variable(
                key, 'ansible_host',
                value['management']['ip-address'].split('/')[0])
def parse_source(self, source, cache=False):
    ''' Generate or update inventory for the source provided

    Directories are walked recursively (each entry parsed as its own
    source); anything else is offered to each configured inventory plugin
    until one accepts and parses it.

    :returns: True if at least one plugin parsed the source
    :raises AnsibleParserError: when nothing parsed and failures are fatal
    '''

    parsed = False
    display.debug(u'Examining possible inventory source: %s' % source)

    b_source = to_bytes(source)
    # process directories as a collection of inventories
    if os.path.isdir(b_source):
        display.debug(u'Searching for inventory files in directory: %s' % source)
        for i in sorted(os.listdir(b_source)):
            display.debug(u'Considering %s' % i)
            # Skip hidden files and stuff we explicitly ignore
            if IGNORED.search(i):
                continue

            # recursively deal with directory entries
            fullpath = os.path.join(b_source, i)
            parsed_this_one = self.parse_source(to_native(fullpath))
            display.debug(u'parsed %s as %s' % (fullpath, parsed_this_one))
            # one successful child is enough to count the directory as parsed
            if not parsed:
                parsed = parsed_this_one
    else:
        # left with strings or files, let plugins figure it out

        # set so new hosts can use for inventory_file/dir vars
        self._inventory.current_source = source

        # get inventory plugins if needed, there should always be at least one generator
        if not self._inventory_plugins:
            self._setup_inventory_plugins()

        # try source with each plugin
        failures = []
        for plugin in self._inventory_plugins:
            plugin_name = to_native(getattr(plugin, '_load_name', getattr(plugin, '_original_path', '')))
            display.debug(u'Attempting to use plugin %s (%s)' % (plugin_name, plugin._original_path))

            # initialize
            if plugin.verify_file(source):
                try:
                    # in case plugin fails 1/2 way we dont want partial inventory
                    plugin.parse(self._inventory, self._loader, source, cache=cache)
                    parsed = True
                    display.vvv('Parsed %s inventory source with %s plugin' % (to_text(source), plugin_name))
                    break
                except AnsibleParserError as e:
                    display.debug('%s was not parsable by %s' % (to_text(source), plugin_name))
                    failures.append({'src': source, 'plugin': plugin_name, 'exc': e})
                except Exception as e:
                    display.debug('%s failed to parse %s' % (plugin_name, to_text(source)))
                    failures.append({'src': source, 'plugin': plugin_name, 'exc': e})
            else:
                display.debug('%s did not meet %s requirements' % (to_text(source), plugin_name))
        else:
            # for/else: no plugin broke out of the loop, i.e. nothing parsed
            if not parsed and failures:
                # only if no plugin processed files should we show errors.
                if C.INVENTORY_UNPARSED_IS_FAILED:
                    msg = "Could not parse inventory source %s with available plugins:\n" % source
                    for fail in failures:
                        msg += 'Plugin %s failed: %s\n' % (fail['plugin'], to_native(fail['exc']))
                        if display.verbosity >= 3:
                            msg += "%s\n" % fail['exc'].tb
                    raise AnsibleParserError(msg)
                else:
                    for fail in failures:
                        display.warning(u'\n* Failed to parse %s with %s plugin: %s'
                                        % (to_text(fail['src']), fail['plugin'], to_text(fail['exc'])))
                        if hasattr(fail['exc'], 'tb'):
                            display.vvv(to_text(fail['exc'].tb))

    if not parsed:
        display.warning("Unable to parse %s as an inventory source" % to_text(source))

    # clear up, jic
    self._inventory.current_source = None

    return parsed
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
    """Load and validate an IncludeRole task from its datastructure.

    Validates the argument set (name/role required, 'public' and 'apply'
    only allowed for dynamic include_role), resolves *_from file overrides,
    and wires up an optional 'apply' block as the task's parent.

    :returns: the loaded IncludeRole instance
    :raises AnsibleParserError: on missing name or invalid options
    """
    ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)

    # Validate options
    my_arg_names = frozenset(ir.args.keys())

    # name is needed, or use role as alias
    ir._role_name = ir.args.get('name', ir.args.get('role'))
    if ir._role_name is None:
        raise AnsibleParserError("'name' is a required field for %s." % ir.action, obj=data)

    # 'public' only makes sense for dynamic include_role
    # BUGFIX: message previously said 'private' while the check is for 'public'
    if 'public' in ir.args and ir.action != 'include_role':
        raise AnsibleParserError('Invalid options for %s: public' % ir.action, obj=data)

    if 'private' in ir.args:
        display.deprecated(msg='Supplying "private" for "%s" is a no op, and is deprecated' % ir.action, version='2.8')

    # validate bad args, otherwise we silently ignore
    bad_opts = my_arg_names.difference(IncludeRole.VALID_ARGS)
    if bad_opts:
        raise AnsibleParserError('Invalid options for %s: %s' % (ir.action, ','.join(list(bad_opts))), obj=data)

    # build options for role includes (tasks_from/vars_from/... -> _from_files)
    for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
        from_key = key.replace('_from', '')
        ir._from_files[from_key] = basename(ir.args.get(key))

    apply_attrs = ir.args.pop('apply', {})
    if apply_attrs and ir.action != 'include_role':
        raise AnsibleParserError('Invalid options for %s: apply' % ir.action, obj=data)
    elif apply_attrs:
        # wrap the include in a synthetic block carrying the 'apply' attributes
        apply_attrs['block'] = []
        p_block = Block.load(
            apply_attrs,
            play=block._play,
            parent_block=block,
            role=role,
            task_include=task_include,
            use_handlers=block._use_handlers,
            variable_manager=variable_manager,
            loader=loader,
        )
        ir._parent = p_block

    # manual list as otherwise the options would set other task parameters we don't want.
    for option in my_arg_names.intersection(IncludeRole.OTHER_ARGS):
        setattr(ir, option, ir.args.get(option))

    return ir
def get_vars(self, loader, path, entities, cache=True):
    ''' parses the vars/<env>/<type>.yaml files

    For each Group entity, loads <group>.yml/.yaml from vars/<env>/ where
    <env> is the first entity recognized by self.is_env(). Host entities
    are ignored (empty dict). Results of file discovery are memoized in
    FOUND keyed by (entity name, resolved path).

    :returns: combined vars dict (may be empty)
    :raises AnsibleParserError: for non-Group entities or load errors
    '''
    if not isinstance(entities, list):
        entities = [entities]

    # hosts are not handled by this plugin
    if isinstance(entities[0], Host):
        return {}

    super(VarsModule, self).get_vars(loader, path, entities)
    data = {}

    # Grab name of first env group. If there is no env group for these entities, return empty vars
    try:
        envs = [str(e) for e in entities if self.is_env(e)]
        # Grab first environment name if for some reason there are two declared
        env = envs[0]
        # Remove env groups from entities
        entities = [e for e in entities if not self.is_env(e)]
    except IndexError:
        # BUGFIX: was a bare `except:` which swallowed every exception
        # (including KeyboardInterrupt/SystemExit); only the expected
        # "no env group" case (envs[0] on an empty list) is handled now
        return data

    for entity in entities:
        if isinstance(entity, Group):
            subdir = 'vars/{}'.format(env)
        else:
            raise AnsibleParserError("Supplied entity must be Host or Group, got %s instead" % (type(entity)))

        # avoid 'chroot' type inventory hostnames /path/to/chroot
        if not entity.name.startswith(os.path.sep):
            try:
                found_files = []
                # load vars
                b_opath = os.path.realpath(to_bytes(os.path.join(self._basedir, subdir)))
                opath = to_text(b_opath)
                key = '%s.%s' % (entity.name, opath)
                if cache and key in FOUND:
                    found_files = FOUND[key]
                else:
                    # no need to do much if path does not exist for basedir
                    if os.path.exists(b_opath):
                        if os.path.isdir(b_opath):
                            self._display.debug("\tprocessing dir %s" % opath)
                            # Find .yml and .yaml files
                            found_files = loader.find_vars_files(opath, entity.name + '.yml')
                            found_files += loader.find_vars_files(opath, entity.name + '.yaml')
                            FOUND[key] = found_files
                        else:
                            self._display.warning("Found %s that is not a directory, skipping: %s" % (subdir, opath))

                for found in found_files:
                    new_data = loader.load_from_file(found, cache=True, unsafe=True)
                    if new_data:  # ignore empty files
                        data = combine_vars(data, new_data)

            except Exception as e:
                raise AnsibleParserError(to_native(e))

    return data
def _load_role_data(self, role_include, parent_role=None):
    """Load this role's files (meta/tasks/handlers/vars/defaults) from disk.

    Copies identity and context from the RoleInclude, extends inherited
    'when'/'tags', registers the role's library dir, and materializes
    metadata, task blocks, handler blocks, and the two vars dictionaries.

    :raises AnsibleParserError: when vars/defaults files are not dictionaries
    """
    self._role_name = role_include.role
    self._role_path = role_include.get_role_path()
    self._role_params = role_include.get_role_params()
    self._variable_manager = role_include.get_variable_manager()
    self._loader = role_include.get_loader()

    if parent_role:
        self.add_parent(parent_role)

    # extend (not replace) inherited conditionals and tags
    current_when = getattr(self, 'when')[:]
    current_when.extend(role_include.when)
    setattr(self, 'when', current_when)

    current_tags = getattr(self, 'tags')[:]
    current_tags.extend(role_include.tags)
    setattr(self, 'tags', current_tags)

    # save the current base directory for the loader and set it to the current role path
    #cur_basedir = self._loader.get_basedir()
    #self._loader.set_basedir(self._role_path)

    # load the role's files, if they exist
    library = os.path.join(self._role_path, 'library')
    if os.path.isdir(library):
        module_loader.add_directory(library)

    metadata = self._load_role_yaml('meta')
    if metadata:
        self._metadata = RoleMetadata.load(metadata, owner=self, loader=self._loader)
        self._dependencies = self._load_dependencies()

    task_data = self._load_role_yaml('tasks')
    if task_data:
        self._task_blocks = load_list_of_blocks(task_data, role=self, loader=self._loader)

    handler_data = self._load_role_yaml('handlers')
    if handler_data:
        self._handler_blocks = load_list_of_blocks(handler_data, role=self, loader=self._loader)

    # vars and default vars are regular dictionaries
    self._role_vars = self._load_role_yaml('vars')
    if not isinstance(self._role_vars, (dict, NoneType)):
        # BUGFIX: previously passed obj=ds, but no 'ds' exists in this scope
        # (raising here would have produced a NameError instead of the
        # intended parser error); attach the offending data instead
        raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables"
                                 % self._role_name, obj=self._role_vars)
    elif self._role_vars is None:
        self._role_vars = dict()

    self._default_vars = self._load_role_yaml('defaults')
    if not isinstance(self._default_vars, (dict, NoneType)):
        # BUGFIX: same undefined 'ds' reference as above
        raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables"
                                 % self._role_name, obj=self._default_vars)
    elif self._default_vars is None:
        self._default_vars = dict()
def _add_host_to_keyed_groups(self, keys, variables, host, strict=False):
    ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it

    :param keys: list of keyed-group entries (dicts with 'key' and optional
                 'prefix', 'separator', 'parent_group')
    :param variables: base variables used to evaluate each entry's 'key'
    :param host: inventory hostname to add to the generated groups
    :param strict: when True, raise on compose failures or empty keys
                   instead of skipping the entry
    :raises AnsibleParserError: on malformed entries or (strict) failures
    '''
    if keys and isinstance(keys, list):
        for keyed in keys:
            if keyed and isinstance(keyed, dict):
                # merge in the host's own vars so 'key' can reference them
                variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
                try:
                    key = self._compose(keyed.get('key'), variables)
                except Exception as e:
                    if strict:
                        raise AnsibleParserError("Could not generate group for host %s from %s entry: %s"
                                                 % (host, keyed.get('key'), to_native(e)))
                    continue

                if key:
                    prefix = keyed.get('prefix', '')
                    sep = keyed.get('separator', '_')
                    raw_parent_name = keyed.get('parent_group', None)

                    # a key may yield one name (string), several (list),
                    # or name/value pairs (mapping -> "<name><sep><value>")
                    new_raw_group_names = []
                    if isinstance(key, string_types):
                        new_raw_group_names.append(key)
                    elif isinstance(key, list):
                        for name in key:
                            new_raw_group_names.append(name)
                    elif isinstance(key, Mapping):
                        for (gname, gval) in key.items():
                            name = '%s%s%s' % (gname, sep, gval)
                            new_raw_group_names.append(name)
                    else:
                        raise AnsibleParserError("Invalid group name format, expected a string or a list of them or dictionary, got: %s" % type(key))

                    for bare_name in new_raw_group_names:
                        gname = self._sanitize_group_name('%s%s%s' % (prefix, sep, bare_name))
                        result_gname = self.inventory.add_group(gname)
                        self.inventory.add_child(result_gname, host)

                        if raw_parent_name:
                            # nest the generated group under the requested parent
                            parent_name = self._sanitize_group_name(raw_parent_name)
                            self.inventory.add_group(parent_name)
                            self.inventory.add_child(parent_name, result_gname)
                else:
                    # exclude case of empty list and dictionary, because these are valid constructions
                    # simply no groups need to be constructed, but are still falsy
                    if strict and key not in ([], {}):
                        raise AnsibleParserError("No key or key resulted empty for %s in host %s, invalid entry" % (keyed.get('key'), host))
            else:
                raise AnsibleParserError("Invalid keyed group entry, it must be a dictionary: %s " % keyed)
def construct_sysparm_query(query):
    """Parse *query* and re-serialize it into a sysparm query string.

    :raises AnsibleParserError: when the query cannot be parsed
    """
    parsed_query, parse_error = parse_query(query)
    if parse_error:
        raise AnsibleParserError(parse_error)
    result = serialize_query(parsed_query)
    return result
def parse(self, inventory, loader, path, cache=True):
    """Populate inventory with GCP instances for each configured project/zone.

    Validates the YAML config ('projects' required list, optional 'zones'
    list), then either replays cached results or fetches per project/zone
    and refreshes the cache.

    :raises AnsibleParserError: on malformed configuration
    """
    super(InventoryModule, self).parse(inventory, loader, path)

    config_data = {}
    config_data = self._read_config_data(path)

    if self.get_option('use_contrib_script_compatible_sanitization'):
        # match group names produced by the old contrib script
        self._sanitize_group_name = self._legacy_script_compatible_group_sanitization

    # get user specifications
    if 'zones' in config_data:
        if not isinstance(config_data['zones'], list):
            raise AnsibleParserError("Zones must be a list in GCP inventory YAML files")

    # get user specifications
    if 'projects' not in config_data:
        raise AnsibleParserError("Projects must be included in inventory YAML file")

    if not isinstance(config_data['projects'], list):
        raise AnsibleParserError("Projects must be a list in GCP inventory YAML files")

    # add in documented defaults
    if 'filters' not in config_data:
        config_data['filters'] = None

    projects = config_data['projects']
    zones = config_data.get('zones')
    config_data['scopes'] = ['https://www.googleapis.com/auth/compute']

    query = self._get_query_options(config_data['filters'])

    # Cache logic: outer 'cache' parameter gates use; plugin option decides
    if cache:
        cache = self.get_option('cache')
        cache_key = self.get_cache_key(path)
    else:
        cache_key = None

    cache_needs_update = False
    if cache:
        try:
            results = self.cache.get(cache_key)
            for project in results:
                for zone in results[project]:
                    self._add_hosts(results[project][zone], config_data, False)
        except KeyError:
            cache_needs_update = True

    if not cache or cache_needs_update:
        cached_data = {}
        for project in projects:
            cached_data[project] = {}
            config_data['project'] = project
            # BUGFIX: previously `zones` itself was overwritten with the first
            # project's auto-discovered zones and then reused for every later
            # project; resolve the zone list per project instead.
            if zones:
                project_zones = zones
            else:
                project_zones = self._get_zones(config_data)
            for zone in project_zones:
                config_data['zone'] = zone
                link = self.self_link(config_data)
                resp = self.fetch_list(config_data, link, query)
                self._add_hosts(resp.get('items'), config_data)
                cached_data[project][zone] = resp.get('items')

    if cache_needs_update:
        self.cache.set(cache_key, cached_data)
def _verify_includes_and_excludes(self, conditions): if conditions and all(i in conditions for i in ["includes", "excludes"]): raise AnsibleParserError( "Invalid configuration: 'includes' and 'excludes' are mutually exclusive." )
def _load_role_data(self, role_include, parent_role=None):
    """Load this (possibly collection-hosted) role's data from disk.

    Copies field attributes from the RoleInclude (extending 'when'/'tags'),
    loads vars/defaults/meta, configures the role's collection search list,
    then loads task and handler blocks (optionally prepending an argspec
    validation task).

    :raises AnsibleParserError: when vars/defaults are not mappings or
        tasks/handlers files are not lists
    """
    self._role_name = role_include.role
    self._role_path = role_include.get_role_path()
    self._role_collection = role_include._role_collection
    self._role_params = role_include.get_role_params()
    self._variable_manager = role_include.get_variable_manager()
    self._loader = role_include.get_loader()

    if parent_role:
        self.add_parent(parent_role)

    # copy over all field attributes from the RoleInclude
    # update self._attributes directly, to avoid squashing
    for (attr_name, dump) in iteritems(self._valid_attrs):
        if attr_name in ('when', 'tags'):
            # 'when' and 'tags' accumulate rather than overwrite
            self._attributes[attr_name] = self._extend_value(
                self._attributes[attr_name],
                role_include._attributes[attr_name],
            )
        else:
            self._attributes[attr_name] = role_include._attributes[attr_name]

    # vars and default vars are regular dictionaries
    self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
    if self._role_vars is None:
        self._role_vars = {}
    elif not isinstance(self._role_vars, Mapping):
        raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

    self._default_vars = self._load_role_yaml('defaults', main=self._from_files.get('defaults'), allow_dir=True)
    if self._default_vars is None:
        self._default_vars = {}
    elif not isinstance(self._default_vars, Mapping):
        raise AnsibleParserError("The defaults/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

    # load the role's other files, if they exist
    metadata = self._load_role_yaml('meta')
    if metadata:
        self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
        self._dependencies = self._load_dependencies()
    else:
        self._metadata = RoleMetadata()

    # reset collections list; roles do not inherit collections from parents, just use the defaults
    # FUTURE: use a private config default for this so we can allow it to be overridden later
    self.collections = []

    # configure plugin/collection loading; either prepend the current role's collection or configure legacy plugin loading
    # FIXME: need exception for explicit ansible.legacy?
    if self._role_collection:
        # this is a collection-hosted role
        self.collections.insert(0, self._role_collection)
    else:
        # this is a legacy role, but set the default collection if there is one
        default_collection = AnsibleCollectionConfig.default_collection
        if default_collection:
            self.collections.insert(0, default_collection)
        # legacy role, ensure all plugin dirs under the role are added to plugin search path
        add_all_plugin_dirs(self._role_path)

    # collections can be specified in metadata for legacy or collection-hosted roles
    if self._metadata.collections:
        self.collections.extend((c for c in self._metadata.collections if c not in self.collections))

    # if any collections were specified, ensure that core or legacy synthetic collections are always included
    if self.collections:
        # default append collection is core for collection-hosted roles, legacy for others
        default_append_collection = 'ansible.builtin' if self._role_collection else 'ansible.legacy'
        if 'ansible.builtin' not in self.collections and 'ansible.legacy' not in self.collections:
            self.collections.append(default_append_collection)

    task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))

    if self._should_validate:
        # prepend an auto-generated argument-spec validation task
        role_argspecs = self._get_role_argspecs()
        task_data = self._prepend_validation_task(task_data, role_argspecs)

    if task_data:
        try:
            self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader,
                                                    variable_manager=self._variable_manager)
        except AssertionError as e:
            raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks"
                                     % self._role_name, obj=task_data, orig_exc=e)

    handler_data = self._load_role_yaml('handlers', main=self._from_files.get('handlers'))
    if handler_data:
        try:
            self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True,
                                                       loader=self._loader, variable_manager=self._variable_manager)
        except AssertionError as e:
            raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks"
                                     % self._role_name, obj=handler_data, orig_exc=e)
def _parse_group(self, group, group_data):
    """Add one group from a YAML inventory source to the inventory.

    ``group_data`` may be a mapping with the optional sections 'vars',
    'children' and 'hosts', or None for an empty group.  Anything else is
    not a valid group definition and is skipped with a warning.
    """
    if not isinstance(group_data, (MutableMapping, NoneType)):
        self.display.warning("Skipping '%s' as this is not a valid group definition" % group)
        return

    try:
        self.inventory.add_group(group)
    except AnsibleError as e:
        raise AnsibleParserError("Unable to add group %s: %s" % (group, to_text(e)))

    if group_data is None:
        # empty group: nothing more to process
        return

    # Normalize the well-known sections first: a bare string is shorthand
    # for a single-entry mapping; any other non-mapping value is an error.
    for section in ('vars', 'children', 'hosts'):
        if section not in group_data:
            continue
        if isinstance(group_data[section], string_types):
            group_data[section] = {group_data[section]: None}
        if not isinstance(group_data[section], (MutableMapping, NoneType)):
            raise AnsibleParserError(
                'Invalid "%s" entry for "%s" group, requires a dictionary, found "%s" instead.' %
                (section, group, type(group_data[section])))

    for key, entry in group_data.items():
        if not isinstance(entry, (MutableMapping, NoneType)):
            # unknown keys with non-mapping values are skipped, not fatal
            self.display.warning(
                'Skipping key (%s) in group (%s) as it is not a mapping, it is a %s' % (key, group, type(entry)))
        elif isinstance(entry, NoneType):
            self.display.vvv('Skipping empty key (%s) in group (%s)' % (key, group))
        elif key == 'vars':
            for var_name, var_value in entry.items():
                self.inventory.set_variable(group, var_name, var_value)
        elif key == 'children':
            # recurse into subgroups before linking them to this group
            for subgroup, subgroup_data in entry.items():
                self._parse_group(subgroup, subgroup_data)
                self.inventory.add_child(group, subgroup)
        elif key == 'hosts':
            for host_pattern, host_vars in entry.items():
                hosts, port = self._parse_host(host_pattern)
                self._populate_host_vars(hosts, host_vars or {}, group, port)
        else:
            self.display.warning(
                'Skipping unexpected key (%s) in group (%s), only "vars", "children" and "hosts" are valid' % (key, group))
def _load_role_data(self, role_include, parent_role=None):
    """Populate this Role object from a RoleInclude definition.

    Copies the include's field attributes onto the role (merging 'when'
    and 'tags' rather than overwriting), registers any plugins shipped
    inside the role directory, then loads the role's meta, tasks,
    handlers, vars and defaults files.

    :param role_include: the RoleInclude this role was created from.
    :param parent_role: optional parent Role, recorded for dependency chains.
    :raises AnsibleParserError: if tasks/handlers files are not lists, or
        vars/defaults files are not dictionaries.
    """
    self._role_name = role_include.role
    self._role_path = role_include.get_role_path()
    self._role_params = role_include.get_role_params()
    self._variable_manager = role_include.get_variable_manager()
    self._loader = role_include.get_loader()

    if parent_role:
        self.add_parent(parent_role)

    # copy over all field attributes, except for when and tags, which
    # are special cases and need to preserve pre-existing values
    for (attr_name, _) in iteritems(self._get_base_attributes()):
        if attr_name not in ('when', 'tags'):
            setattr(self, attr_name, getattr(role_include, attr_name))

    # 'when' and 'tags' accumulate: keep what the role already has and
    # append the include's entries (copy first so the originals are untouched)
    current_when = getattr(self, 'when')[:]
    current_when.extend(role_include.when)
    setattr(self, 'when', current_when)

    current_tags = getattr(self, 'tags')[:]
    current_tags.extend(role_include.tags)
    setattr(self, 'tags', current_tags)

    # dynamically load any plugins from the role directory
    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join(self._role_path, obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)

    # load the role's other files, if they exist
    metadata = self._load_role_yaml('meta')
    if metadata:
        self._metadata = RoleMetadata.load(metadata, owner=self, variable_manager=self._variable_manager, loader=self._loader)
        # dependencies come from meta, so they can only be loaded when meta exists
        self._dependencies = self._load_dependencies()
    else:
        self._metadata = RoleMetadata()

    task_data = self._load_role_yaml('tasks')
    if task_data:
        try:
            self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader)
        except AssertionError:
            raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=task_data)

    handler_data = self._load_role_yaml('handlers')
    if handler_data:
        try:
            self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader)
        except AssertionError:
            # FIX: this was a bare `except:` (which would also trap
            # SystemExit/KeyboardInterrupt and mask unrelated errors) and it
            # reported the wrong file via obj=task_data; catch only the
            # assertion raised by load_list_of_blocks and point at the
            # handler data, matching the tasks handling above.
            raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=handler_data)

    # vars and default vars are regular dictionaries
    self._role_vars = self._load_role_yaml('vars')
    if self._role_vars is None:
        self._role_vars = dict()
    elif not isinstance(self._role_vars, dict):
        raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

    self._default_vars = self._load_role_yaml('defaults')
    if self._default_vars is None:
        self._default_vars = dict()
    elif not isinstance(self._default_vars, dict):
        raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)
def post_validate(self, templar):
    '''
    we can't tell that everything is of the right type until we have all
    the variables.  Run basic types (from isa) as well as
    any _post_validate_<foo> functions.

    Templates each field attribute, coerces it to the type declared by
    the FieldAttribute's `isa`, and writes the massaged value back onto
    self.  Raises AnsibleParserError on missing required fields, failed
    conversions, or undefined variables (when the templar is set to fail
    on undefined).
    '''

    # save the omit value for later checking
    omit_value = templar._available_variables.get('omit')

    for (name, attribute) in iteritems(self._valid_attrs):

        if getattr(self, name) is None:
            if not attribute.required:
                continue
            else:
                raise AnsibleParserError("the field '%s' is required but was not set" % name)
        elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
            # Intermediate objects like Play() won't have their fields validated by
            # default, as their values are often inherited by other objects and validated
            # later, so we don't want them to fail out early
            continue

        try:
            # Run the post-validator if present. These methods are responsible for
            # using the given templar to template the values, if required.
            method = getattr(self, '_post_validate_%s' % name, None)
            if method:
                value = method(attribute, getattr(self, name), templar)
            elif attribute.isa == 'class':
                # class-typed fields are never templated; taken as-is
                value = getattr(self, name)
            else:
                # if the attribute contains a variable, template it now
                value = templar.template(getattr(self, name))

            # if this evaluated to the omit value, set the value back to
            # the default specified in the FieldAttribute and move on
            if omit_value is not None and value == omit_value:
                setattr(self, name, attribute.default)
                continue

            # and make sure the attribute is of the type it should be
            if value is not None:
                if attribute.isa == 'string':
                    value = to_text(value)
                elif attribute.isa == 'int':
                    value = int(value)
                elif attribute.isa == 'float':
                    value = float(value)
                elif attribute.isa == 'bool':
                    value = boolean(value, strict=False)
                elif attribute.isa == 'percent':
                    # special value, which may be an integer or float
                    # with an optional '%' at the end
                    if isinstance(value, string_types) and '%' in value:
                        value = value.replace('%', '')
                    value = float(value)
                elif attribute.isa in ('list', 'barelist'):
                    if value is None:
                        value = []
                    elif not isinstance(value, list):
                        if isinstance(value, string_types) and attribute.isa == 'barelist':
                            # 'barelist' accepts a comma-separated string (deprecated)
                            display.deprecated(
                                "Using comma separated values for a list has been deprecated. "
                                "You should instead use the correct YAML syntax for lists. "
                            )
                            value = value.split(',')
                        else:
                            # a scalar becomes a single-element list
                            value = [value]
                    if attribute.listof is not None:
                        # each element must match the declared element type
                        for item in value:
                            if not isinstance(item, attribute.listof):
                                raise AnsibleParserError(
                                    "the field '%s' should be a list of %s, "
                                    "but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                            elif attribute.required and attribute.listof == string_types:
                                # required string lists may not contain empty/blank entries
                                if item is None or item.strip() == "":
                                    raise AnsibleParserError(
                                        "the field '%s' is required, and cannot have empty values" % (name, ), obj=self.get_ds())
                elif attribute.isa == 'set':
                    if value is None:
                        value = set()
                    elif not isinstance(value, (list, set)):
                        if isinstance(value, string_types):
                            value = value.split(',')
                        else:
                            # Making a list like this handles strings of
                            # text and bytes properly
                            value = [value]
                    if not isinstance(value, set):
                        value = set(value)
                elif attribute.isa == 'dict':
                    if value is None:
                        value = dict()
                    elif not isinstance(value, dict):
                        raise TypeError("%s is not a dictionary" % value)
                elif attribute.isa == 'class':
                    if not isinstance(value, attribute.class_type):
                        raise TypeError("%s is not a valid %s (got a %s instead)" % (name, attribute.class_type, type(value)))
                    # nested objects validate themselves recursively
                    value.post_validate(templar=templar)

            # and assign the massaged value back to the attribute field
            setattr(self, name, value)
        except (TypeError, ValueError) as e:
            # NOTE(review): the two adjacent string literals below concatenate
            # without a space ("...an %s.The error was..."); likely unintended,
            # left unchanged here.
            raise AnsibleParserError(
                "the field '%s' has an invalid value (%s), and could not be converted to an %s."
                "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds(), orig_exc=e)
        except (AnsibleUndefinedVariable, UndefinedError) as e:
            if templar._fail_on_undefined_errors and name != 'name':
                if name == 'args':
                    msg = "The task includes an option with an undefined variable. The error was: %s" % (to_native(e))
                else:
                    msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (name, to_native(e))
                raise AnsibleParserError(msg, obj=self.get_ds(), orig_exc=e)

    # mark the object as fully resolved so it is not post-validated again
    self._finalized = True
def _resolve_group(self, fq_group_name, mandatory=True):
    """Resolve a module_defaults action group to its list of actions.

    Returns a tuple of (fully-qualified group name, list of resolved
    action FQCNs).  Results are cached on the play; groups may extend
    other groups recursively via their metadata 'extend_group' entry.

    :param fq_group_name: group name, fully qualified or short (short
        names are assumed to live in ansible.builtin).
    :param mandatory: when True, failure to resolve raises
        AnsibleParserError; when False it is logged at vvvvv and an
        empty action list is returned.
    """
    if not AnsibleCollectionRef.is_valid_fqcr(fq_group_name):
        # short name: assume it belongs to the builtin collection
        collection_name = 'ansible.builtin'
        fq_group_name = collection_name + '.' + fq_group_name
    else:
        # collection is the first two dotted components of the FQCR
        collection_name = '.'.join(fq_group_name.split('.')[0:2])

    # Check if the group has already been resolved and cached
    if fq_group_name in self.play._group_actions:
        return fq_group_name, self.play._group_actions[fq_group_name]

    try:
        action_groups = _get_collection_metadata(collection_name).get('action_groups', {})
    except ValueError:
        if not mandatory:
            display.vvvvv("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name)
            return fq_group_name, []

        raise AnsibleParserError("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name)

    # The collection may or may not use the fully qualified name
    # Don't fail if the group doesn't exist in the collection
    resource_name = fq_group_name.split(collection_name + '.')[-1]
    action_group = action_groups.get(fq_group_name, action_groups.get(resource_name))
    if action_group is None:
        if not mandatory:
            display.vvvvv("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name)
            return fq_group_name, []
        raise AnsibleParserError("Error loading module_defaults: could not resolve the module_defaults group %s" % fq_group_name)

    resolved_actions = []
    include_groups = []

    found_group_metadata = False
    for action in action_group:
        # Everything should be a string except the metadata entry
        if not isinstance(action, string_types):
            _validate_action_group_metadata(action, found_group_metadata, fq_group_name)

            if isinstance(action['metadata'], dict):
                found_group_metadata = True

                include_groups = action['metadata'].get('extend_group', [])
                if isinstance(include_groups, string_types):
                    include_groups = [include_groups]
                if not isinstance(include_groups, list):
                    # Bad entries may be a warning above, but prevent tracebacks by setting it back to the acceptable type.
                    include_groups = []
            continue

        # The collection may or may not use the fully qualified name.
        # If not, it's part of the current collection.
        if not AnsibleCollectionRef.is_valid_fqcr(action):
            action = collection_name + '.' + action
        resolved_action = self._resolve_action(action, mandatory=False)
        if resolved_action:
            resolved_actions.append(resolved_action)

    # record the reverse mapping action -> groups containing it
    for action in resolved_actions:
        if action not in self.play._action_groups:
            self.play._action_groups[action] = []
        self.play._action_groups[action].append(fq_group_name)

    self.play._group_actions[fq_group_name] = resolved_actions

    # Resolve extended groups last, after caching the group in case they recursively refer to each other
    for include_group in include_groups:
        if not AnsibleCollectionRef.is_valid_fqcr(include_group):
            # NOTE(review): include_group_collection is assigned in both
            # branches but never read afterwards — dead variable.
            include_group_collection = collection_name
            include_group = collection_name + '.' + include_group
        else:
            include_group_collection = '.'.join(include_group.split('.')[0:2])

        dummy, group_actions = self._resolve_group(include_group, mandatory=False)

        for action in group_actions:
            if action not in self.play._action_groups:
                self.play._action_groups[action] = []
            self.play._action_groups[action].append(fq_group_name)

        self.play._group_actions[fq_group_name].extend(group_actions)
        resolved_actions.extend(group_actions)

    return fq_group_name, resolved_actions
def post_validate(self, templar):
    '''
    we can't tell that everything is of the right type until we have all
    the variables.  Run basic types (from isa) as well as
    any _post_validate_<foo> functions.

    Templates each base attribute, coerces it to the type declared by the
    FieldAttribute's `isa`, and writes the result back onto self.  Raises
    AnsibleParserError for missing required fields, failed conversions,
    or (optionally) undefined variables.
    '''

    # NOTE(review): basedir is computed but never used in this method;
    # kept because get_basedir() may be relied on elsewhere — TODO confirm.
    basedir = None
    if self._loader is not None:
        basedir = self._loader.get_basedir()

    # save the omit value for later checking
    omit_value = templar._available_variables.get('omit')

    for (name, attribute) in iteritems(self._get_base_attributes()):

        if getattr(self, name) is None:
            if not attribute.required:
                continue
            else:
                raise AnsibleParserError("the field '%s' is required but was not set" % name)
        elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'PlayContext'):
            # Intermediate objects like Play() won't have their fields validated by
            # default, as their values are often inherited by other objects and validated
            # later, so we don't want them to fail out early
            continue

        try:
            # Run the post-validator if present. These methods are responsible for
            # using the given templar to template the values, if required.
            method = getattr(self, '_post_validate_%s' % name, None)
            if method:
                value = method(attribute, getattr(self, name), templar)
            else:
                # if the attribute contains a variable, template it now
                value = templar.template(getattr(self, name))

            # if this evaluated to the omit value, set the value back to
            # the default specified in the FieldAttribute and move on
            if omit_value is not None and value == omit_value:
                # FIX: previously this was `value = attribute.default` followed
                # by `continue`, a dead store that skipped the setattr below and
                # left the omit placeholder on the attribute; write the default
                # back explicitly before moving on.
                setattr(self, name, attribute.default)
                continue

            # and make sure the attribute is of the type it should be
            if value is not None:
                if attribute.isa == 'string':
                    value = unicode(value)
                elif attribute.isa == 'int':
                    value = int(value)
                elif attribute.isa == 'float':
                    value = float(value)
                elif attribute.isa == 'bool':
                    value = boolean(value)
                elif attribute.isa == 'percent':
                    # special value, which may be an integer or float
                    # with an optional '%' at the end
                    if isinstance(value, string_types) and '%' in value:
                        value = value.replace('%', '')
                    value = float(value)
                elif attribute.isa == 'list':
                    if value is None:
                        value = []
                    elif not isinstance(value, list):
                        # a scalar becomes a single-element list
                        value = [value]
                    if attribute.listof is not None:
                        # each element must match the declared element type
                        for item in value:
                            if not isinstance(item, attribute.listof):
                                raise AnsibleParserError("the field '%s' should be a list of %s, but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
                            elif attribute.required and attribute.listof == string_types:
                                # required string lists may not contain empty/blank entries
                                if item is None or item.strip() == "":
                                    raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name, ), obj=self.get_ds())
                elif attribute.isa == 'set':
                    if value is None:
                        value = set()
                    else:
                        if not isinstance(value, (list, set)):
                            value = [value]
                        if not isinstance(value, set):
                            value = set(value)
                elif attribute.isa == 'dict':
                    if value is None:
                        value = dict()
                    elif not isinstance(value, dict):
                        raise TypeError("%s is not a dictionary" % value)

            # and assign the massaged value back to the attribute field
            setattr(self, name, value)

        except (TypeError, ValueError) as e:
            raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s. Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
        except UndefinedError as e:
            if templar._fail_on_undefined_errors and name != 'name':
                raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined. The error was: %s" % (name, e), obj=self.get_ds())
def post_validate(self, templar):
    '''
    we can't tell that everything is of the right type until we have all
    the variables.  Run basic types (from isa) as well as
    any _post_validate_<foo> functions.

    Templates each field attribute (except static ones), delegates type
    coercion to get_validated_value(), and writes the result back onto
    self.  Raises AnsibleParserError for missing required fields, failed
    conversions, or undefined variables.
    '''

    # save the omit value for later checking
    omit_value = templar.available_variables.get('omit')

    for (name, attribute) in iteritems(self._valid_attrs):

        if attribute.static:
            value = getattr(self, name)

            # we don't template 'vars' but allow template as values for later use
            if name not in ('vars', ) and templar.is_template(value):
                display.warning('"%s" is not templatable, but we found: %s, '
                                'it will not be templated and will be used "as is".' % (name, value))
            continue

        if getattr(self, name) is None:
            if not attribute.required:
                continue
            else:
                raise AnsibleParserError("the field '%s' is required but was not set" % name)
        elif not attribute.always_post_validate and self.__class__.__name__ not in ('Task', 'Handler', 'PlayContext'):
            # Intermediate objects like Play() won't have their fields validated by
            # default, as their values are often inherited by other objects and validated
            # later, so we don't want them to fail out early
            continue

        try:
            # Run the post-validator if present. These methods are responsible for
            # using the given templar to template the values, if required.
            method = getattr(self, '_post_validate_%s' % name, None)
            if method:
                value = method(attribute, getattr(self, name), templar)
            elif attribute.isa == 'class':
                # class-typed fields are never templated; taken as-is
                value = getattr(self, name)
            else:
                # if the attribute contains a variable, template it now
                value = templar.template(getattr(self, name))

            # if this evaluated to the omit value, set the value back to
            # the default specified in the FieldAttribute and move on
            if omit_value is not None and value == omit_value:
                # defaults may be factories (e.g. list/dict) — call if callable
                if callable(attribute.default):
                    setattr(self, name, attribute.default())
                else:
                    setattr(self, name, attribute.default)
                continue

            # and make sure the attribute is of the type it should be
            if value is not None:
                value = self.get_validated_value(name, attribute, value, templar)

            # and assign the massaged value back to the attribute field
            setattr(self, name, value)
        except (TypeError, ValueError) as e:
            # re-read the untemplated value so the error shows what the user wrote
            value = getattr(self, name)
            # NOTE(review): the two adjacent string literals below concatenate
            # without a space ("...an %s.The error was..."); likely unintended,
            # left unchanged here.
            raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
                                     "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds(), orig_exc=e)
        except (AnsibleUndefinedVariable, UndefinedError) as e:
            if templar._fail_on_undefined_errors and name != 'name':
                if name == 'args':
                    msg = "The task includes an option with an undefined variable. The error was: %s" % (to_native(e))
                else:
                    msg = "The field '%s' has an invalid value, which includes an undefined variable. The error was: %s" % (name, to_native(e))
                raise AnsibleParserError(msg, obj=self.get_ds(), orig_exc=e)

    # mark the object as fully resolved so it is not post-validated again
    self._finalized = True
def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.

    Normalizes the many legacy task forms (action:, local_action:,
    module-name keys, with_* loops) into a clean mapping with 'action',
    'args', 'delegate_to' and the remaining task attributes, then hands
    it to the parent class's preprocess_data().
    '''

    if not isinstance(ds, dict):
        raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        # preserve YAML file/line info for error reporting
        new_ds.ansible_pos = ds.ansible_pos

    # since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
    default_collection = AnsibleCollectionConfig.default_collection

    collections_list = ds.get('collections')
    if collections_list is None:
        # use the parent value if our ds doesn't define it
        collections_list = self.collections
    else:
        # Validate this untemplated field early on to guarantee we are dealing with a list.
        # This is also done in CollectionSearch._load_collections() but this runs before that call.
        collections_list = self.get_validated_value('collections', self._collections, collections_list, None)

    if default_collection and not self._role:  # FIXME: and not a collections role
        if collections_list:
            if default_collection not in collections_list:
                collections_list.insert(0, default_collection)
        else:
            collections_list = [default_collection]

    # guarantee one of the synthetic collections is searched so builtin
    # modules remain resolvable
    if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
        collections_list.append('ansible.legacy')

    if collections_list:
        ds['collections'] = collections_list

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        # if the raises exception was created with obj=ds args, then it includes the detail
        # so we dont need to add it so we can just re raise.
        if e.obj:
            raise
        # But if it wasn't, we can add the yaml object now to get more detail
        raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
    else:
        self.resolved_action = args_parser.resolved_action

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in C._ACTION_HAS_CMD:
        if 'cmd' in args:
            if args.get('_raw_params', '') != '':
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args['_raw_params'] = args.pop('cmd')

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.get('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in ds.items():
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
            continue
        elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
            # transform into loop property
            self._preprocess_with_loop(ds, new_ds, k, v)
        elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
            # NOTE(review): when INVALID_TASK_ATTRIBUTE_FAILED is set, unknown
            # keys are carried through here (presumably to fail during later
            # validation rather than just warn) — confirm against config docs.
            new_ds[k] = v
        else:
            display.warning("Ignoring invalid attribute: %s" % k)

    return super(Task, self).preprocess_data(new_ds)
def parse(self, skip_action_validation=False):
    '''
    Given a task in one of the supported forms, parses and returns
    the action, arguments, and delegate_to values for the
    task, dealing with all sorts of levels of fuzziness.

    :param skip_action_validation: when True, any unrecognized key is
        accepted as the action without consulting the plugin loaders.
    :returns: tuple of (action, args dict, delegate_to).
    :raises AnsibleParserError: on conflicting/missing action statements
        or disallowed raw params.
    '''

    thing = None

    action = None
    # Sentinel distinguishes "not specified" from an explicit delegate_to
    delegate_to = self._task_ds.get('delegate_to', Sentinel)
    args = dict()

    # This is the standard YAML form for command-type modules. We grab
    # the args and pass them in as additional arguments, which can/will
    # be overwritten via dict updates from the other arg sources below
    additional_args = self._task_ds.get('args', dict())

    # We can have one of action, local_action, or module specified
    # action
    if 'action' in self._task_ds:
        # an old school 'action' statement
        thing = self._task_ds['action']
        action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

    # local_action
    if 'local_action' in self._task_ds:
        # local_action is similar but also implies a delegate_to
        if action is not None:
            raise AnsibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
        thing = self._task_ds.get('local_action', '')
        delegate_to = 'localhost'
        action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

    # module: <stuff> is the more new-style invocation

    # filter out task attributes so we're only querying unrecognized keys as actions/modules
    non_task_ds = dict((k, v) for k, v in iteritems(self._task_ds) if (k not in self._task_attrs) and (not k.startswith('with_')))

    # walk the filtered input dictionary to see if we recognize a module name
    for item, value in iteritems(non_task_ds):
        context = None
        is_action_candidate = False
        if item in BUILTIN_TASKS:
            is_action_candidate = True
        elif skip_action_validation:
            is_action_candidate = True
        else:
            # try the action loader first, then fall back to module loader
            context = action_loader.find_plugin_with_context(item, collection_list=self._collection_list)
            if not context.resolved:
                context = module_loader.find_plugin_with_context(item, collection_list=self._collection_list)
            is_action_candidate = context.resolved and bool(context.redirect_list)

        if is_action_candidate:
            # finding more than one module name is a problem
            if action is not None:
                raise AnsibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)

            if context is not None and context.resolved:
                self.resolved_action = context.resolved_fqcn

            action = item
            thing = value
            action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)

    # if we didn't see any module in the task at all, it's not a task really
    if action is None:
        if non_task_ds:  # there was one non-task action, but we couldn't find it
            bad_action = list(non_task_ds.keys())[0]
            raise AnsibleParserError("couldn't resolve module/action '{0}'. This often indicates a "
                                     "misspelling, missing collection, or incorrect module path.".format(bad_action),
                                     obj=self._task_ds)
        else:
            raise AnsibleParserError("no module/action detected in task.", obj=self._task_ds)
    elif args.get('_raw_params', '') != '' and action not in RAW_PARAM_MODULES:
        # free-form params are only allowed for a known set of modules;
        # a templated value is deferred as _variable_params for later resolution
        templar = Templar(loader=None)
        raw_params = args.pop('_raw_params')
        if templar.is_template(raw_params):
            args['_variable_params'] = raw_params
        else:
            raise AnsibleParserError("this task '%s' has extra params, which is only allowed in the following modules: %s" % (action, ", ".join(RAW_PARAM_MODULES)),
                                     obj=self._task_ds)

    return (action, args, delegate_to)
def get_vars(self, play=None, host=None, task=None, include_hostvars=True, include_delegate_to=True, use_cache=True, _hosts=None, _hosts_all=None, stage='task'): ''' Returns the variables, with optional "context" given via the parameters for the play, host, and task (which could possibly result in different sets of variables being returned due to the additional context). The order of precedence is: - play->roles->get_default_vars (if there is a play context) - group_vars_files[host] (if there is a host context) - host_vars_files[host] (if there is a host context) - host->get_vars (if there is a host context) - fact_cache[host] (if there is a host context) - play vars (if there is a play context) - play vars_files (if there's no host context, ignore file names that cannot be templated) - task->get_vars (if there is a task context) - vars_cache[host] (if there is a host context) - extra vars ``_hosts`` and ``_hosts_all`` should be considered private args, with only internal trusted callers relying on the functionality they provide. These arguments may be removed at a later date without a deprecation period and without warning. 
''' display.debug("in VariableManager get_vars()") all_vars = dict() magic_variables = self._get_magic_variables( play=play, host=host, task=task, include_hostvars=include_hostvars, include_delegate_to=include_delegate_to, _hosts=_hosts, _hosts_all=_hosts_all, ) _vars_sources = {} def _combine_and_track(data, new_data, source): ''' Wrapper function to update var sources dict and call combine_vars() See notes in the VarsWithSources docstring for caveats and limitations of the source tracking ''' if C.DEFAULT_DEBUG: # Populate var sources dict for key in new_data: _vars_sources[key] = source return combine_vars(data, new_data) # default for all cases basedirs = [] if self.safe_basedir: # avoid adhoc/console loading cwd basedirs = [self._loader.get_basedir()] if play: # first we compile any vars specified in defaults/main.yml # for all roles within the specified play for role in play.get_roles(): all_vars = _combine_and_track(all_vars, role.get_default_vars(), "role '%s' defaults" % role.name) if task: # set basedirs if C.PLAYBOOK_VARS_ROOT == 'all': # should be default basedirs = task.get_search_path() elif C.PLAYBOOK_VARS_ROOT in ( 'bottom', 'playbook_dir'): # only option in 2.4.0 basedirs = [task.get_search_path()[0]] elif C.PLAYBOOK_VARS_ROOT != 'top': # preserves default basedirs, only option pre 2.3 raise AnsibleError('Unknown playbook vars logic: %s' % C.PLAYBOOK_VARS_ROOT) # if we have a task in this context, and that task has a role, make # sure it sees its defaults above any other roles, as we previously # (v1) made sure each task had a copy of its roles default vars if task._role is not None and (play or task.action in C._ACTION_INCLUDE_ROLE): all_vars = _combine_and_track( all_vars, task._role.get_default_vars( dep_chain=task.get_dep_chain()), "role '%s' defaults" % task._role.name) if host: # THE 'all' group and the rest of groups for a host, used below all_group = self._inventory.groups.get('all') host_groups = sort_groups( [g for g in host.get_groups() 
if g.name not in ['all']]) def _get_plugin_vars(plugin, path, entities): data = {} try: data = plugin.get_vars(self._loader, path, entities) except AttributeError: try: for entity in entities: if isinstance(entity, Host): data.update(plugin.get_host_vars(entity.name)) else: data.update(plugin.get_group_vars(entity.name)) except AttributeError: if hasattr(plugin, 'run'): raise AnsibleError( "Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path)) else: raise AnsibleError( "Invalid vars plugin %s from %s" % (plugin._load_name, plugin._original_path)) return data # internal functions that actually do the work def _plugins_inventory(entities): ''' merges all entities by inventory source ''' return get_vars_from_inventory_sources( self._loader, self._inventory._sources, entities, stage) def _plugins_play(entities): ''' merges all entities adjacent to play ''' data = {} for path in basedirs: data = _combine_and_track( data, get_vars_from_path(self._loader, path, entities, stage), "path '%s'" % path) return data # configurable functions that are sortable via config, remember to add to _ALLOWED if expanding this list def all_inventory(): return all_group.get_vars() def all_plugins_inventory(): return _plugins_inventory([all_group]) def all_plugins_play(): return _plugins_play([all_group]) def groups_inventory(): ''' gets group vars from inventory ''' return get_group_vars(host_groups) def groups_plugins_inventory(): ''' gets plugin sources from inventory for groups ''' return _plugins_inventory(host_groups) def groups_plugins_play(): ''' gets plugin sources from play for groups ''' return _plugins_play(host_groups) def plugins_by_groups(): ''' merges all plugin sources by group, This should be used instead, NOT in combination with the other groups_plugins* functions ''' data = {} for group in host_groups: data[group] = _combine_and_track( data[group], _plugins_inventory(group), "inventory group_vars for '%s'" % group) data[group] = 
_combine_and_track( data[group], _plugins_play(group), "playbook group_vars for '%s'" % group) return data # Merge groups as per precedence config # only allow to call the functions we want exposed for entry in C.VARIABLE_PRECEDENCE: if entry in self._ALLOWED: display.debug('Calling %s to load vars for %s' % (entry, host.name)) all_vars = _combine_and_track( all_vars, locals()[entry](), "group vars, precedence entry '%s'" % entry) else: display.warning( 'Ignoring unknown variable precedence entry: %s' % (entry)) # host vars, from inventory, inventory adjacent and play adjacent via plugins all_vars = _combine_and_track(all_vars, host.get_vars(), "host vars for '%s'" % host) all_vars = _combine_and_track( all_vars, _plugins_inventory([host]), "inventory host_vars for '%s'" % host) all_vars = _combine_and_track(all_vars, _plugins_play([host]), "playbook host_vars for '%s'" % host) # finally, the facts caches for this host, if it exists # TODO: cleaning of facts should eventually become part of taskresults instead of vars try: facts = wrap_var(self._fact_cache.get(host.name, {})) all_vars.update(namespace_facts(facts)) # push facts to main namespace if C.INJECT_FACTS_AS_VARS: all_vars = _combine_and_track(all_vars, wrap_var(clean_facts(facts)), "facts") else: # always 'promote' ansible_local all_vars = _combine_and_track( all_vars, wrap_var( {'ansible_local': facts.get('ansible_local', {})}), "facts") except KeyError: pass if play: all_vars = _combine_and_track(all_vars, play.get_vars(), "play vars") vars_files = play.get_vars_files() try: for vars_file_item in vars_files: # create a set of temporary vars here, which incorporate the extra # and magic vars so we can properly template the vars_files entries # NOTE: this makes them depend on host vars/facts so things like # ansible_facts['os_distribution'] can be used, ala include_vars. # Consider DEPRECATING this in the future, since we have include_vars ... 
temp_vars = combine_vars(all_vars, self._extra_vars) temp_vars = combine_vars(temp_vars, magic_variables) templar = Templar(loader=self._loader, variables=temp_vars) # we assume each item in the list is itself a list, as we # support "conditional includes" for vars_files, which mimics # the with_first_found mechanism. vars_file_list = vars_file_item if not isinstance(vars_file_list, list): vars_file_list = [vars_file_list] # now we iterate through the (potential) files, and break out # as soon as we read one from the list. If none are found, we # raise an error, which is silently ignored at this point. try: for vars_file in vars_file_list: vars_file = templar.template(vars_file) if not (isinstance(vars_file, Sequence)): raise AnsibleError( "Invalid vars_files entry found: %r\n" "vars_files entries should be either a string type or " "a list of string types after template expansion" % vars_file) try: play_search_stack = play.get_search_path() found_file = real_file = self._loader.path_dwim_relative_stack( play_search_stack, 'vars', vars_file) data = preprocess_vars( self._loader.load_from_file(found_file, unsafe=True, cache=False)) if data is not None: for item in data: all_vars = _combine_and_track( all_vars, item, "play vars_files from '%s'" % vars_file) break except AnsibleFileNotFound: # we continue on loader failures continue except AnsibleParserError: raise else: # if include_delegate_to is set to False or we don't have a host, we ignore the missing # vars file here because we're working on a delegated host or require host vars, see NOTE above if include_delegate_to and host: raise AnsibleFileNotFound( "vars file %s was not found" % vars_file_item) except (UndefinedError, AnsibleUndefinedVariable): if host is not None and self._fact_cache.get( host.name, dict()).get( 'module_setup') and task is not None: raise AnsibleUndefinedVariable( "an undefined variable was found when attempting to template the vars_files item '%s'" % vars_file_item, obj=vars_file_item) 
else: # we do not have a full context here, and the missing variable could be because of that # so just show a warning and continue display.vvv( "skipping vars_file '%s' due to an undefined variable" % vars_file_item) continue display.vvv("Read vars_file '%s'" % vars_file_item) except TypeError: raise AnsibleParserError( "Error while reading vars files - please supply a list of file names. " "Got '%s' of type %s" % (vars_files, type(vars_files))) # By default, we now merge in all vars from all roles in the play, # unless the user has disabled this via a config option if not C.DEFAULT_PRIVATE_ROLE_VARS: for role in play.get_roles(): all_vars = _combine_and_track( all_vars, role.get_vars(include_params=False), "role '%s' vars" % role.name) # next, we merge in the vars from the role, which will specifically # follow the role dependency chain, and then we merge in the tasks # vars (which will look at parent blocks/task includes) if task: if task._role: all_vars = _combine_and_track( all_vars, task._role.get_vars(task.get_dep_chain(), include_params=False), "role '%s' vars" % task._role.name) all_vars = _combine_and_track(all_vars, task.get_vars(), "task vars") # next, we merge in the vars cache (include vars) and nonpersistent # facts cache (set_fact/register), in that order if host: # include_vars non-persistent cache all_vars = _combine_and_track( all_vars, self._vars_cache.get(host.get_name(), dict()), "include_vars") # fact non-persistent cache all_vars = _combine_and_track( all_vars, self._nonpersistent_fact_cache.get(host.name, dict()), "set_fact") # next, we merge in role params and task include params if task: if task._role: all_vars = _combine_and_track( all_vars, task._role.get_role_params(task.get_dep_chain()), "role '%s' params" % task._role.name) # special case for include tasks, where the include params # may be specified in the vars field for the task, which should # have higher precedence than the vars/np facts above all_vars = 
_combine_and_track(all_vars, task.get_include_params(), "include params") # extra vars all_vars = _combine_and_track(all_vars, self._extra_vars, "extra vars") # magic variables all_vars = _combine_and_track(all_vars, magic_variables, "magic vars") # special case for the 'environment' magic variable, as someone # may have set it as a variable and we don't want to stomp on it if task: all_vars['environment'] = task.environment # 'vars' magic var if task or play: # has to be copy, otherwise recursive ref all_vars['vars'] = all_vars.copy() # if we have a host and task and we're delegating to another host, # figure out the variables for that host now so we don't have to rely on host vars later if task and host and task.delegate_to is not None and include_delegate_to: all_vars['ansible_delegated_vars'], all_vars[ '_ansible_loop_cache'] = self._get_delegated_vars( play, task, all_vars) display.debug("done with get_vars()") if C.DEFAULT_DEBUG: # Use VarsWithSources wrapper class to display var sources return VarsWithSources.new_vars_with_sources( all_vars, _vars_sources) else: return all_vars
def _load_included_file(self, included_file, iterator, is_handler=False):
    '''
    Loads an included YAML file of tasks, applying the optional set of variables.

    :arg included_file: IncludedFile-like object; this method reads its
        ``_filename``, ``_task``, ``_args`` and ``_hosts`` attributes.
    :arg iterator: play iterator, used to mark hosts failed when loading fails.
    :kwarg is_handler: when True, the loaded blocks are built as handler blocks.
    :returns: list of parsed Block objects; an empty list when the file body is
        None or when any AnsibleError occurs while loading/parsing.
    '''
    display.debug("loading included file: %s" % included_file._filename)
    try:
        data = self._loader.load_from_file(included_file._filename)
        if data is None:
            # an empty/None file body is not an error: nothing to run
            return []
        elif not isinstance(data, list):
            raise AnsibleError(
                "included task files must contain a list of tasks")

        # work on a copy of the include task so the original task object is
        # not mutated; merge the include args over the task's own vars
        ti_copy = included_file._task.copy()
        temp_vars = ti_copy.vars.copy()
        temp_vars.update(included_file._args)

        # pop tags out of the include args, if they were specified there, and assign
        # them to the include. If the include already had tags specified, we raise an
        # error so that users know not to specify them both ways
        tags = included_file._task.vars.pop('tags', [])
        if isinstance(tags, string_types):
            # allow a comma-separated string form of tags
            tags = tags.split(',')
        if len(tags) > 0:
            if len(included_file._task.tags) > 0:
                raise AnsibleParserError(
                    "Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                    "Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                    obj=included_file._task._ds)
            display.deprecated(
                "You should not specify tags in the include parameters. All tags should be specified using the task-level option"
            )
            included_file._task.tags = tags

        ti_copy.vars = temp_vars

        block_list = load_list_of_blocks(
            data,
            play=iterator._play,
            parent_block=None,
            task_include=ti_copy,
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader,
            variable_manager=self._variable_manager,
        )

        # since we skip incrementing the stats when the task result is
        # first processed, we do so now for each host in the list
        for host in included_file._hosts:
            self._tqm._stats.increment('ok', host.name)

    except AnsibleError as e:
        # mark all of the hosts including this file as failed, send callbacks,
        # and increment the stats for this host
        for host in included_file._hosts:
            tr = TaskResult(host=host,
                            task=included_file._task,
                            return_data=dict(failed=True, reason=to_text(e)))
            iterator.mark_host_failed(host)
            self._tqm._failed_hosts[host.name] = True
            self._tqm._stats.increment('failures', host.name)
            self._tqm.send_callback('v2_runner_on_failed', tr)
        return []

    # finally, send the callback and return the list of blocks loaded
    self._tqm.send_callback('v2_playbook_on_include', included_file)
    display.debug("done processing included file")
    return block_list
def parse(self, inventory, loader, path, cache=True):
    '''
    Populate the inventory from ``VBoxManage list -l`` output.

    :arg inventory: inventory object to populate.
    :arg loader: DataLoader used to read the plugin config file at ``path``.
    :arg path: path to the plugin configuration file.
    :kwarg cache: when True, reuse previously cached VBoxManage output.
    :returns: False when ``path`` is not a config file for this plugin.
    :raises AnsibleParserError: when the config cannot be loaded or
        VBoxManage cannot be executed.
    '''
    super(InventoryModule, self).parse(inventory, loader, path)
    cache_key = self.get_cache_prefix(path)

    # file is config file
    try:
        data = self.loader.load_from_file(path)
    except Exception as e:
        raise AnsibleParserError(e)

    if not data or data.get('plugin') != self.NAME:
        # this is not my config file
        return False

    if cache and cache_key in inventory.cache:
        source_data = inventory.cache[cache_key]
    else:
        pwfile = to_bytes(data.get('settings_password_file'))
        running = data.get('running_only', False)

        # build the VBoxManage command line
        cmd = [self.VBOX, 'list', '-l']
        if running:
            cmd.append('runningvms')
        else:
            cmd.append('vms')

        if pwfile and os.path.exists(pwfile):
            cmd.append('--settingspwfile')
            cmd.append(pwfile)

        try:
            p = Popen(cmd, stdout=PIPE)
        except Exception as e:
            # BUG FIX: the original built AnsibleParserError(e) without
            # raising it, then fell through to use an undefined 'p'
            raise AnsibleParserError(e)

        source_data = p.stdout.readlines()
        inventory.cache[cache_key] = source_data

    hostvars = {}
    prevkey = pref_k = ''
    current_host = None

    # needed to possibly set ansible_host
    netinfo = data.get('network_info_path',
                       "/VirtualBox/GuestInfo/Net/0/V4/IP")

    for line in source_data:
        try:
            k, v = line.split(':', 1)
        except (ValueError, TypeError):
            # skip lines without a 'key: value' shape (was a bare except:).
            # TypeError covers bytes lines that cannot be split with a str
            # separator -- NOTE(review): pipe output may be bytes on py3;
            # confirm whether lines should be decoded via to_text first
            continue
        if k.strip() == '':
            # skip empty keys
            continue
        v = v.strip()
        # found host
        if k.startswith('Name') and ',' not in v:  # some setting strings appear in Name
            current_host = v
            if current_host not in hostvars:
                hostvars[current_host] = {}
            self.inventory.add_host(current_host)
            # try to get network info
            netdata = self.query_vbox_data(current_host, netinfo)
            if netdata:
                self.inventory.set_variable(current_host, 'ansible_host',
                                            netdata)
        # found groups
        elif k == 'Groups':
            for group in v.split('/'):
                if group:
                    self.inventory.add_group(group)
                    self.inventory.add_child(group, current_host)
            continue
        else:
            # found vars, accumulate in hostvars for clean inventory set
            pref_k = 'vbox_' + k.strip().replace(' ', '_')
            if k.startswith(' '):
                # an indented key is a sub-value of the previous key
                if prevkey not in hostvars[current_host]:
                    hostvars[current_host][prevkey] = {}
                hostvars[current_host][prevkey][pref_k] = v
            else:
                if v != '':
                    hostvars[current_host][pref_k] = v
            prevkey = pref_k

    # set vars in inventory from hostvars
    for host in hostvars:
        # create vars from vbox properties
        if data.get('query') and isinstance(data['query'], dict):
            for varname in data['query']:
                hostvars[host][varname] = self.query_vbox_data(
                    host, data['query'][varname])
        # create composite vars
        if data.get('compose') and isinstance(data['compose'], dict):
            for varname in data['compose']:
                hostvars[host][varname] = self._compose(
                    data['compose'][varname], hostvars[host])
        # actually update inventory
        for key in hostvars[host]:
            self.inventory.set_variable(host, key, hostvars[host][key])
def parse(self, inventory, loader, path, cache=False):
    '''
    Read an OpenStack inventory source (plugin config or clouds.yaml),
    gather hosts via shade and populate the inventory.

    :arg inventory: inventory object to populate.
    :arg loader: DataLoader used to read the source file at ``path``.
    :arg path: path to the plugin configuration or clouds.yaml file.
    :kwarg cache: when True, reuse previously cached host data.
    :raises AnsibleParserError: when the file cannot be read or is not a
        valid source for this plugin.
    '''
    super(InventoryModule, self).parse(inventory, loader, path)
    cache_key = self.get_cache_prefix(path)

    # load the source file; any read/parse failure becomes a parser error
    try:
        self._config_data = self.loader.load_from_file(path)
    except Exception as e:
        raise AnsibleParserError(e)

    # validate the source -- the first failing check wins
    problem = ''
    if not self._config_data:
        problem = 'File empty. this is not my config file'
    elif 'plugin' in self._config_data and self._config_data['plugin'] != self.NAME:
        problem = 'plugin config file, but not for us: %s' % self._config_data['plugin']
    elif 'plugin' not in self._config_data and 'clouds' not in self._config_data:
        problem = "it's not a plugin configuration nor a clouds.yaml file"
    elif not HAS_SHADE:
        problem = "shade is required for the OpenStack inventory plugin. OpenStack inventory sources will be skipped."
    if problem:
        raise AnsibleParserError(problem)

    # The user has pointed us at a clouds.yaml file. Use defaults for
    # everything.
    if 'clouds' in self._config_data:
        self._config_data = {}

    # consult the cache first when allowed
    hosts_data = self.cache.get(cache_key) if cache else None

    if not hosts_data:
        yaml_override = self._config_data.get('clouds_yaml_path')
        config_files = (
            (yaml_override + os_client_config.config.CONFIG_FILES)
            if yaml_override else None)

        # TODO(mordred) Integrate shade's logging with ansible's logging
        shade.simple_logging()

        cloud_inventory = shade.inventory.OpenStackInventory(
            config_files=config_files,
            private=self._config_data.get('private', False))

        wanted_clouds = self._config_data.get('only_clouds', [])
        if wanted_clouds and not isinstance(wanted_clouds, list):
            raise ValueError(
                'OpenStack Inventory Config Error: only_clouds must be'
                ' a list')
        if wanted_clouds:
            # restrict shade to the explicitly requested clouds
            cloud_inventory.clouds = [
                c for c in cloud_inventory.clouds if c.name in wanted_clouds
            ]

        hosts_data = cloud_inventory.list_hosts(
            expand=self._config_data.get('expand_hostvars', False),
            fail_on_cloud_config=self._config_data.get('fail_on_errors', False))

        self.cache[cache_key] = hosts_data

    self._populate_from_source(hosts_data)