def munge(self, ds):
    '''
    Reorganizes the data for a TaskInclude datastructure to line
    up with what we expect the proper attributes to be
    '''

    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.copy_position_info(ds)

    for (k, v) in ds.iteritems():
        if k == 'include':
            self._munge_include(ds, new_ds, k, v)
        elif k.replace("with_", "") in lookup_finder:
            self._munge_loop(ds, new_ds, k, v)
        else:
            # some basic error checking, to make sure vars are properly
            # formatted and do not conflict with k=v parameters
            # FIXME: we could merge these instead, but controlling the order
            #        in which they're encountered could be difficult
            if k == 'vars':
                if 'vars' in new_ds:
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                elif not isinstance(v, dict):
                    raise AnsibleParserError("vars for include statements must be specified as a dictionary", obj=ds)
            new_ds[k] = v

    return new_ds

def construct_mapping(self, node, deep=False):
    # Most of this is from yaml.constructor.SafeConstructor. We replicate
    # it here so that we can warn users when they have duplicate dict keys
    # (pyyaml silently allows overwriting keys)
    if not isinstance(node, MappingNode):
        raise ConstructorError(None, None,
                               "expected a mapping node, but found %s" % node.id,
                               node.start_mark)
    self.flatten_mapping(node)
    mapping = AnsibleMapping()

    # Add our extra information to the returned value
    mapping.ansible_pos = self._node_position_info(node)

    for key_node, value_node in node.value:
        key = self.construct_object(key_node, deep=deep)
        try:
            hash(key)
        except TypeError as exc:
            raise ConstructorError("while constructing a mapping", node.start_mark,
                                   "found unacceptable key (%s)" % exc, key_node.start_mark)

        if key in mapping:
            display.warning('While constructing a mapping from {1}, line {2}, column {3}, found a duplicate dict key ({0}). Using last defined value only.'.format(key, *mapping.ansible_pos))

        value = self.construct_object(value_node, deep=deep)
        mapping[key] = value

    return mapping

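The duplicate-key detection above can be reproduced without any Ansible machinery. A minimal, self-contained sketch using plain PyYAML; the class name and the print-based warning are made up for illustration and are not part of Ansible:

import yaml
from yaml.constructor import ConstructorError
from yaml.nodes import MappingNode


class DuplicateKeyWarningLoader(yaml.SafeLoader):
    def construct_mapping(self, node, deep=False):
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                                   "expected a mapping node, but found %s" % node.id,
                                   node.start_mark)
        self.flatten_mapping(node)
        mapping = {}
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if key in mapping:
                # pyyaml itself would silently keep the last value
                print("warning: duplicate dict key %r at %s" % (key, key_node.start_mark))
            mapping[key] = self.construct_object(value_node, deep=deep)
        return mapping


print(yaml.load("a: 1\na: 2\n", Loader=DuplicateKeyWarningLoader))  # -> {'a': 2}
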
def preprocess_data(self, ds):
    '''
    Reorganizes the data for a PlaybookInclude datastructure to line
    up with what we expect the proper attributes to be
    '''

    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    for (k, v) in iteritems(ds):
        if k == 'include':
            self._preprocess_include(ds, new_ds, k, v)
        else:
            # some basic error checking, to make sure vars are properly
            # formatted and do not conflict with k=v parameters
            if k == 'vars':
                if 'vars' in new_ds:
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                elif not isinstance(v, dict):
                    raise AnsibleParserError("vars for include statements must be specified as a dictionary", obj=ds)
            new_ds[k] = v

    return super(PlaybookInclude, self).preprocess_data(new_ds)

def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    '''

    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # use the args parsing class to determine the action, args,
    # and the connection value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds)
    (action, args, connection) = args_parser.parse()

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['connection'] = connection

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.pop('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in iteritems(ds):
        if k in ('action', 'local_action', 'args', 'connection') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue
        elif k.replace("with_", "") in lookup_loader:
            self._preprocess_loop(ds, new_ds, k, v)
        else:
            # pre-2.0 syntax allowed variables for include statements at the
            # top level of the task, so we move those into the 'vars' dictionary
            # here, and show a deprecation message as we will remove this at
            # some point in the future.
            if action == 'include' and k not in self._get_base_attributes() and k not in self.DEPRECATED_ATTRIBUTES:
                self._display.deprecated("Specifying include variables at the top-level of the task is deprecated. Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\nfor currently supported syntax regarding included files and variables")
                new_ds['vars'][k] = v
            else:
                new_ds[k] = v

    return super(Task, self).preprocess_data(new_ds)

def construct_mapping(self, node, deep=False):
    ret = AnsibleMapping(super(Constructor, self).construct_mapping(node, deep))
    ret._line_number = node.__line__
    ret._column_number = node.__column__

    # in some cases, we may have pre-read the data and then
    # passed it to the load() call for YAML, in which case we
    # want to override the default datasource (which would be
    # '<string>') to the actual filename we read in
    if self._ansible_file_name:
        ret._data_source = self._ansible_file_name
    else:
        ret._data_source = node.__datasource__

    return ret

def raw_triage(self, key_string, item, patterns):
    # process dict values
    if isinstance(item, AnsibleMapping):
        return AnsibleMapping(dict((key, self.raw_triage('.'.join([key_string, key]), value, patterns))
                                   for key, value in item.iteritems()))
    # process list values
    elif isinstance(item, AnsibleSequence):
        return AnsibleSequence([self.raw_triage('.'.join([key_string, str(i)]), value, patterns)
                                for i, value in enumerate(item)])
    # wrap values if they match raw_vars pattern
    elif isinstance(item, AnsibleUnicode):
        match = next((pattern for pattern in patterns if re.match(pattern, key_string)), None)
        return AnsibleUnicode(''.join(['{% raw %}', item, '{% endraw %}'])) if not item.startswith(('{% raw', '{%raw')) and match else item
    # return all other types unchanged; without this branch the function
    # would implicitly return None and drop the value (the later revision
    # below adds the same fix)
    else:
        return item

def raw_triage(self, key_string, item, patterns):
    # process dict values
    if isinstance(item, AnsibleMapping):
        return AnsibleMapping(dict((key, self.raw_triage('.'.join([key_string, key]), value, patterns))
                                   for key, value in iteritems(item)))
    # process list values
    elif isinstance(item, AnsibleSequence):
        return AnsibleSequence([self.raw_triage('.'.join([key_string, str(i)]), value, patterns)
                                for i, value in enumerate(item)])
    # wrap values if they match raw_vars pattern
    elif isinstance(item, AnsibleUnicode):
        match = next((pattern for pattern in patterns if re.match(pattern, key_string)), None)
        return wrap_var(item) if match else item
    else:
        return item

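The wrapping branch is easier to see in isolation. A stand-alone sketch of the {% raw %} wrapping used in the first variant above, with plain str standing in for AnsibleUnicode; the pattern and key path are made up for illustration:

import re

# hypothetical raw_vars pattern and key path
patterns = [r'^vars\.literal_']
key_string = 'vars.literal_banner'
item = '{{ this should not be templated }}'

match = next((pattern for pattern in patterns if re.match(pattern, key_string)), None)
if match and not item.startswith(('{% raw', '{%raw')):
    item = ''.join(['{% raw %}', item, '{% endraw %}'])

print(item)  # {% raw %}{{ this should not be templated }}{% endraw %}
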
def preprocess_data(self, ds):
    # role names that are simply numbers can be parsed by PyYAML
    # as integers even when quoted, so turn it into a string type
    if isinstance(ds, int):
        ds = "%s" % ds

    if not isinstance(ds, dict) and not isinstance(ds, string_types) and not isinstance(ds, AnsibleBaseYAMLObject):
        raise AnsibleAssertionError()

    if isinstance(ds, dict):
        ds = super(RoleDefinition, self).preprocess_data(ds)

    # save the original ds for use later
    self._ds = ds

    # we create a new data structure here, using the same
    # object used internally by the YAML parsing code so we
    # can preserve file:line:column information if it exists
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # first we pull the role name out of the data structure,
    # and then use that to determine the role path (which may
    # result in a new role name, if it was a file path)
    role_name = self._load_role_name(ds)
    (role_name, role_path) = self._load_role_path(role_name)

    # next, we split the role params out from the valid role
    # attributes and update the new datastructure with that
    # result and the role name
    if isinstance(ds, dict):
        (new_role_def, role_params) = self._split_role_params(ds)
        new_ds.update(new_role_def)
        self._role_params = role_params

    # set the role name in the new ds
    new_ds['role'] = role_name

    # we store the role path internally
    self._role_path = role_path

    # and return the cleaned-up data structure
    return new_ds

def construct_mapping(self, node, deep=False):
    # Most of this is from yaml.constructor.SafeConstructor. We replicate
    # it here so that we can warn users when they have duplicate dict keys
    # (pyyaml silently allows overwriting keys)
    if not isinstance(node, MappingNode):
        raise ConstructorError(None, None,
                               "expected a mapping node, but found %s" % node.id,
                               node.start_mark)
    self.flatten_mapping(node)
    mapping = AnsibleMapping()

    # Add our extra information to the returned value
    mapping.ansible_pos = self._node_position_info(node)

    for key_node, value_node in node.value:
        key = self.construct_object(key_node, deep=deep)
        try:
            hash(key)
        except TypeError as exc:
            raise ConstructorError("while constructing a mapping", node.start_mark,
                                   "found unacceptable key (%s)" % exc, key_node.start_mark)

        if key in mapping:
            msg = (u'While constructing a mapping from {1}, line {2}, column {3}, found a duplicate dict key ({0}).'
                   u' Using last defined value only.'.format(key, *mapping.ansible_pos))
            if C.DUPLICATE_YAML_DICT_KEY == 'warn':
                display.warning(msg)
            elif C.DUPLICATE_YAML_DICT_KEY == 'error':
                raise ConstructorError(context=None, context_mark=None,
                                       problem=to_native(msg),
                                       problem_mark=node.start_mark,
                                       note=None)
            else:
                # when 'ignore'
                display.debug(msg)

        value = self.construct_object(value_node, deep=deep)
        mapping[key] = value

    return mapping

def construct_yaml_map(self, node):
    data = AnsibleMapping()
    yield data
    value = self.construct_mapping(node)
    data.update(value)
    data._line_number = value._line_number
    data._column_number = value._column_number
    data._data_source = value._data_source

def munge(self, ds):
    # create the new ds as an AnsibleMapping, so we can preserve any line/column
    # data from the parser, and copy that info from the old ds (if applicable)
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.copy_position_info(ds)

    # Role definitions can be strings or dicts, so we fix things up here.
    # Anything that is not a role name, tag, or conditional will also be
    # added to the params sub-dictionary for loading later
    if isinstance(ds, string_types):
        new_ds['role_name'] = ds
    else:
        # munge the role ds here to correctly fill in the various fields which
        # may be used to define the role, like: role, src, scm, etc.
        ds = self._munge_role(ds)

        # now we split any random role params off from the role spec and store
        # them in a dictionary of params for parsing later
        params = dict()
        attr_names = [attr_name for (attr_name, attr_value) in self._get_base_attributes().iteritems()]
        for (key, value) in iteritems(ds):
            if key not in attr_names and key != 'role':
                # this key does not match a field attribute, so it must be a role param
                params[key] = value
            else:
                # this is a field attribute, so copy it over directly
                new_ds[key] = value
        new_ds['params'] = params

    # Set the role name and path, based on the role definition
    (role_name, role_path) = self._get_role_path(new_ds.get('role_name'))
    new_ds['role_name'] = role_name
    new_ds['role_path'] = role_path

    # load the role's files, if they exist
    new_ds['task_blocks'] = self._load_role_yaml(role_path, 'tasks')
    new_ds['handler_blocks'] = self._load_role_yaml(role_path, 'handlers')
    new_ds['default_vars'] = self._load_role_yaml(role_path, 'defaults')
    new_ds['role_vars'] = self._load_role_yaml(role_path, 'vars')

    # we treat metadata slightly differently: we instead pull out the
    # valid metadata keys and munge them directly into new_ds
    metadata_ds = self._munge_metadata(role_name, role_path)
    new_ds.update(metadata_ds)

    # and return the newly munged ds
    return new_ds

def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    '''

    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds)
    (action, args, delegate_to) = args_parser.parse()

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    for (k, v) in ds.iteritems():
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue
        elif k.replace("with_", "") in lookup_loader:
            self._preprocess_loop(ds, new_ds, k, v)
        else:
            new_ds[k] = v

    return super(Task, self).preprocess_data(new_ds)

def munge(self, ds):
    '''
    Reorganizes the data for a TaskInclude datastructure to line
    up with what we expect the proper attributes to be
    '''

    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.copy_position_info(ds)

    for (k, v) in ds.iteritems():
        if k == 'include':
            self._munge_include(ds, new_ds, k, v)
        elif k.replace("with_", "") in lookup_loader:
            self._munge_loop(ds, new_ds, k, v)
        else:
            # some basic error checking, to make sure vars are properly
            # formatted and do not conflict with k=v parameters
            # FIXME: we could merge these instead, but controlling the order
            #        in which they're encountered could be difficult
            if k == 'vars':
                if 'vars' in new_ds:
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                elif not isinstance(v, dict):
                    raise AnsibleParserError("vars for include statements must be specified as a dictionary", obj=ds)
            new_ds[k] = v

    return new_ds

def munge(self, ds):
    assert isinstance(ds, dict) or isinstance(ds, string_types)

    # we create a new data structure here, using the same
    # object used internally by the YAML parsing code so we
    # can preserve file:line:column information if it exists
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.copy_position_info(ds)

    # first we pull the role name out of the data structure,
    # and then use that to determine the role path (which may
    # result in a new role name, if it was a file path)
    role_name = self._load_role_name(ds)
    (role_name, role_path) = self._load_role_path(role_name)

    # next, we split the role params out from the valid role
    # attributes and update the new datastructure with that
    # result and the role name
    if isinstance(ds, dict):
        (new_role_def, role_params) = self._split_role_params(ds)
        new_ds.update(new_role_def)
        self._role_params = role_params

    # set the role name in the new ds
    new_ds['role'] = role_name

    # we store the role path internally
    self._role_path = role_path

    # save the original ds for use later
    self._ds = ds

    # and return the cleaned-up data structure
    return new_ds

def parse(self, inventory, loader, path, cache=True):
    super(InventoryModule, self).parse(inventory, loader, path, cache)
    self.context = hiveContext()
    self.context.setup(None)
    self._read_config_data(path)
    self.inventory.add_group('services')
    self.inventory.add_group('images')
    self.inventory.add_group('volumes')
    self.inventory.add_group('drbd_volumes')
    self.inventory.add_group('nfs_volumes')
    self.inventory.add_group('networks')

    available_on = self.get_option('available_on')
    if available_on is None:
        available_on = STAGES
    for s in available_on:
        if s not in STAGES:
            raise AnsibleParserError(f'each element of "available_on" must be one of {STAGES}, but got {s}')
        self.inventory.add_group(s)

    services = self.get_option('services')
    if not isinstance(services, AnsibleMapping):
        raise AnsibleParserError(f'"services" must be dict type, but got {type(services)}')
    device_id_map = self.context.vars.get('drbd_device_map', {})
    published_port_map = self.context.vars.get('published_port_map', {})
    for name, options in services.items():
        service = Service(name, options, available_on)
        service.parse(inventory, device_id_map, published_port_map)
    self.context.set_persistent('drbd_device_map', device_id_map)
    self.context.set_persistent('published_port_map', published_port_map)
    self.context.save_persistent()

    networks = self.get_option('networks')
    no_default = True
    if networks is not None:
        if not isinstance(networks, AnsibleMapping):
            raise AnsibleParserError(f'"networks" must be dict type, but got {type(networks)}')
        for name, options in networks.items():
            if name == 'hive_default_network':
                no_default = False
            network = Network(name, options, available_on)
            network.parse(inventory)
    if no_default:
        network = Network('hive_default_network', AnsibleMapping(), available_on)
        network.parse(inventory)

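For context, the inventory source this parse() expects is dict-shaped under the services and networks options. A hypothetical YAML sketch; the plugin name, stage names, and service fields are placeholders inferred from the get_option() calls above, not the plugin's documented schema:

# hypothetical inventory source file
plugin: hive_inventory
available_on:
  - development
  - production
services:
  web:
    image: nginx:latest
networks:
  hive_default_network: {}
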
def construct_mapping(self, node, deep=False, ordered=False):
    # Most of this is from yaml.constructor.SafeConstructor. We replicate
    # it here so that we can warn users when they have duplicate dict keys
    # (pyyaml silently allows overwriting keys)
    if not isinstance(node, MappingNode):
        raise ConstructorError(None, None,
                               "expected a mapping node, but found %s" % node.id,
                               node.start_mark)
    self.flatten_mapping(node)
    if ordered:
        mapping = AnsibleOrderedMapping()
    else:
        mapping = AnsibleMapping()

    # Add our extra information to the returned value
    mapping.ansible_pos = self._node_position_info(node)

    for key_node, value_node in node.value:
        key = self.construct_object(key_node, deep=deep)
        try:
            hash(key)
        except TypeError as exc:
            raise ConstructorError("while constructing a mapping", node.start_mark,
                                   "found unacceptable key (%s)" % exc, key_node.start_mark)

        if key in mapping:
            display.warning(u'While constructing a mapping from {1}, line {2}, column {3}, found a duplicate dict key ({0}).'
                            u' Using last defined value only.'.format(key, *mapping.ansible_pos))

        value = self.construct_object(value_node, deep=deep)
        mapping[key] = value

    return mapping

def munge(self, ds):
    assert isinstance(ds, dict) or isinstance(ds, string_types)

    if isinstance(ds, dict):
        ds = super(RoleDefinition, self).munge(ds)

    # we create a new data structure here, using the same
    # object used internally by the YAML parsing code so we
    # can preserve file:line:column information if it exists
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.copy_position_info(ds)

    # first we pull the role name out of the data structure,
    # and then use that to determine the role path (which may
    # result in a new role name, if it was a file path)
    role_name = self._load_role_name(ds)
    (role_name, role_path) = self._load_role_path(role_name)

    # next, we split the role params out from the valid role
    # attributes and update the new datastructure with that
    # result and the role name
    if isinstance(ds, dict):
        (new_role_def, role_params) = self._split_role_params(ds)
        new_ds.update(new_role_def)
        self._role_params = role_params

    # set the role name in the new ds
    new_ds['role'] = role_name

    # we store the role path internally
    self._role_path = role_path

    # save the original ds for use later
    self._ds = ds

    # and return the cleaned-up data structure
    return new_ds

def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    '''

    if not isinstance(ds, dict):
        raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
    default_collection = AnsibleCollectionLoader().default_collection

    # use the parent value if our ds doesn't define it
    collections_list = ds.get('collections', self.collections)

    if collections_list is None:
        collections_list = []

    if isinstance(collections_list, string_types):
        collections_list = [collections_list]

    if default_collection and not self._role:  # FIXME: and not a collections role
        if collections_list:
            if default_collection not in collections_list:
                collections_list.insert(0, default_collection)
        else:
            collections_list = [default_collection]

    if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
        collections_list.append('ansible.legacy')

    if collections_list:
        ds['collections'] = collections_list

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        # if the raised exception was created with obj=ds args, then it includes the detail,
        # so we don't need to add it and can just re-raise
        if e._obj:
            raise
        # But if it wasn't, we can add the yaml object now to get more detail
        raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in ('command', 'shell', 'script'):
        if 'cmd' in args:
            if args.get('_raw_params', '') != '':
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args['_raw_params'] = args.pop('cmd')

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.get('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in iteritems(ds):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
            continue
        elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
            # transform into loop property
            self._preprocess_with_loop(ds, new_ds, k, v)
        else:
            # pre-2.0 syntax allowed variables for include statements at the top level of the task,
            # so we move those into the 'vars' dictionary here, and show a deprecation message
            # as we will remove this at some point in the future.
            if action in ('include',) and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                                   " Please see:\nhttps://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                                   " for currently supported syntax regarding included files and variables",
                                   version="2.12")
                new_ds['vars'][k] = v
            elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
                new_ds[k] = v
            else:
                display.warning("Ignoring invalid attribute: %s" % k)

    return super(Task, self).preprocess_data(new_ds)

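The collections-list normalization above boils down to a little list surgery. A distilled, runnable sketch with placeholder values standing in for default_collection and the task's own list:

# placeholder values, for illustration only
default_collection = 'my_ns.my_collection'
collections_list = ['community.general']

# prepend the default collection if it is not already present
if default_collection and default_collection not in collections_list:
    collections_list.insert(0, default_collection)

# keep builtin/legacy module resolution working at the end of the search path
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
    collections_list.append('ansible.legacy')

print(collections_list)  # ['my_ns.my_collection', 'community.general', 'ansible.legacy']
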
def construct_yaml_map(self, node):
    data = AnsibleMapping()
    yield data
    value = self.construct_mapping(node)
    data.update(value)
    data.ansible_pos = self._node_position_info(node)

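Why construct an AnsibleMapping at all? It behaves like a plain dict but can carry (source, line, column) metadata for later error messages. A hypothetical minimal stand-in, not the real implementation:

class PositionTrackingDict(dict):
    # mirrors the ansible_pos attribute used in the snippets above
    ansible_pos = ('<unknown>', 0, 0)  # (data source, line, column)


d = PositionTrackingDict(include='foo.yml')
d.ansible_pos = ('site.yml', 12, 3)
src, line, col = d.ansible_pos
print('defined in %s, line %d, column %d' % (src, line, col))
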
def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    '''

    assert isinstance(ds, dict), 'ds (%s) should be a dict but was a %s' % (ds, type(ds))

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in ('command', 'shell', 'script'):
        if 'cmd' in args:
            if args.get('_raw_params', '') != '':
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args['_raw_params'] = args.pop('cmd')

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.get('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in iteritems(ds):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
            continue
        elif k.replace("with_", "") in lookup_loader:
            # transform into loop property
            self._preprocess_loop(ds, new_ds, k, v)
        else:
            # pre-2.0 syntax allowed variables for include statements at the top level of the task,
            # so we move those into the 'vars' dictionary here, and show a deprecation message
            # as we will remove this at some point in the future.
            if action in ('include', 'include_tasks') and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                                   " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                                   " for currently supported syntax regarding included files and variables",
                                   version="2.7")
                new_ds['vars'][k] = v
            elif k in self._valid_attrs:
                new_ds[k] = v
            else:
                display.warning("Ignoring invalid attribute: %s" % k)

    return super(Task, self).preprocess_data(new_ds)

def test_basic_task_include(self):
    ti = TaskInclude.load(AnsibleMapping(include='foo.yml'), loader=self._fake_loader)
    tasks = ti.compile()

def construct_mapping(self, node, deep=False):
    ret = AnsibleMapping(super(Constructor, self).construct_mapping(node, deep))
    ret.ansible_pos = self._node_position_info(node)
    return ret

def test_task_include_with_tags(self):
    ti = TaskInclude.load(AnsibleMapping(include='foo.yml', tags="foo"), loader=self._fake_loader)
    ti = TaskInclude.load(AnsibleMapping(include='foo.yml', tags=["foo", "bar"]), loader=self._fake_loader)

def preprocess_data(self, ds):
    """
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    """

    assert isinstance(ds, dict)

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        raise AnsibleParserError(to_str(e), obj=ds)

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in ("command", "shell", "script"):
        if "cmd" in args:
            if args.get("_raw_params", "") != "":
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args["_raw_params"] = args.pop("cmd")

    new_ds["action"] = action
    new_ds["args"] = args
    new_ds["delegate_to"] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if "vars" in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds["vars"] = self._load_vars(None, ds.get("vars"))
    else:
        new_ds["vars"] = dict()

    for (k, v) in iteritems(ds):
        if k in ("action", "local_action", "args", "delegate_to") or k == action or k == "shell":
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue
        elif k.replace("with_", "") in lookup_loader:
            self._preprocess_loop(ds, new_ds, k, v)
        else:
            # pre-2.0 syntax allowed variables for include statements at the
            # top level of the task, so we move those into the 'vars' dictionary
            # here, and show a deprecation message as we will remove this at
            # some point in the future.
            if action == "include" and k not in self._get_base_attributes() and k not in self.DEPRECATED_ATTRIBUTES:
                display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                                   " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                                   " for currently supported syntax regarding included files and variables")
                new_ds["vars"][k] = v
            else:
                new_ds[k] = v

    return super(Task, self).preprocess_data(new_ds)

def test_task_include_with_loop(self):
    ti = TaskInclude.load(AnsibleMapping(include='foo.yml', with_items=['a', 'b', 'c']), loader=self._fake_loader)

def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    '''

    if not isinstance(ds, dict):
        raise AnsibleAssertionError('ds (%s) should be a dict but was a %s' % (ds, type(ds)))

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
    default_collection = AnsibleCollectionConfig.default_collection

    collections_list = ds.get('collections')
    if collections_list is None:
        # use the parent value if our ds doesn't define it
        collections_list = self.collections
    else:
        # Validate this untemplated field early on to guarantee we are dealing with a list.
        # This is also done in CollectionSearch._load_collections() but this runs before that call.
        collections_list = self.get_validated_value('collections', self._collections, collections_list, None)

    if default_collection and not self._role:  # FIXME: and not a collections role
        if collections_list:
            if default_collection not in collections_list:
                collections_list.insert(0, default_collection)
        else:
            collections_list = [default_collection]

    if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
        collections_list.append('ansible.legacy')

    if collections_list:
        ds['collections'] = collections_list

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        # if the raised exception was created with obj=ds args, then it includes the detail,
        # so we don't need to add it and can just re-raise
        if e.obj:
            raise
        # But if it wasn't, we can add the yaml object now to get more detail
        raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
    else:
        self._ansible_internal_redirect_list = args_parser.internal_redirect_list[:]

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in C._ACTION_HAS_CMD:
        if 'cmd' in args:
            if args.get('_raw_params', '') != '':
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args['_raw_params'] = args.pop('cmd')

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.get('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in iteritems(ds):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
            continue
        elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
            # transform into loop property
            self._preprocess_with_loop(ds, new_ds, k, v)
        elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
            new_ds[k] = v
        else:
            display.warning("Ignoring invalid attribute: %s" % k)

    return super(Task, self).preprocess_data(new_ds)

def test_task_include_with_conditional(self):
    ti = TaskInclude.load(AnsibleMapping(include='foo.yml', when="1 == 1"), loader=self._fake_loader)

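In these tests, self._fake_loader is a stub loader backed by an in-memory dict of file contents, so TaskInclude.load() can "read" foo.yml without touching disk. A hedged sketch of the setup the cases above assume; Ansible's unit test suite provides a DictDataLoader helper for this, though the exact setUp shown here is illustrative:

import unittest

from units.mock.loader import DictDataLoader


class TestTaskInclude(unittest.TestCase):
    def setUp(self):
        # maps file names to their contents for the fake loader
        self._fake_loader = DictDataLoader({
            "foo.yml": '- shell: echo "hello world"\n',
        })
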
def preprocess_data(self, ds):
    '''
    tasks are especially complex arguments so need pre-processing.
    keep it short.
    '''

    assert isinstance(ds, dict), 'ds (%s) should be a dict but was a %s' % (ds, type(ds))

    # the new, cleaned datastructure, which will have legacy
    # items reduced to a standard structure suitable for the
    # attributes of the task class
    new_ds = AnsibleMapping()
    if isinstance(ds, AnsibleBaseYAMLObject):
        new_ds.ansible_pos = ds.ansible_pos

    # use the args parsing class to determine the action, args,
    # and the delegate_to value from the various possible forms
    # supported as legacy
    args_parser = ModuleArgsParser(task_ds=ds)
    try:
        (action, args, delegate_to) = args_parser.parse()
    except AnsibleParserError as e:
        raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)

    # the command/shell/script modules used to support the `cmd` arg,
    # which corresponds to what we now call _raw_params, so move that
    # value over to _raw_params (assuming it is empty)
    if action in ('command', 'shell', 'script'):
        if 'cmd' in args:
            if args.get('_raw_params', '') != '':
                raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
                                   " Please put everything in one or the other place.", obj=ds)
            args['_raw_params'] = args.pop('cmd')

    new_ds['action'] = action
    new_ds['args'] = args
    new_ds['delegate_to'] = delegate_to

    # we handle any 'vars' specified in the ds here, as we may
    # be adding things to them below (special handling for includes).
    # When that deprecated feature is removed, this can be too.
    if 'vars' in ds:
        # _load_vars is defined in Base, and is used to load a dictionary
        # or list of dictionaries in a standard way
        new_ds['vars'] = self._load_vars(None, ds.get('vars'))
    else:
        new_ds['vars'] = dict()

    for (k, v) in iteritems(ds):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
            # we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
            continue
        elif k.replace("with_", "") in lookup_loader:
            # transform into loop property
            self._preprocess_with_loop(ds, new_ds, k, v)
        else:
            # pre-2.0 syntax allowed variables for include statements at the top level of the task,
            # so we move those into the 'vars' dictionary here, and show a deprecation message
            # as we will remove this at some point in the future.
            if action in ('include', 'include_tasks') and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
                display.deprecated("Specifying include variables at the top-level of the task is deprecated."
                                   " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
                                   " for currently supported syntax regarding included files and variables",
                                   version="2.7")
                new_ds['vars'][k] = v
            elif k in self._valid_attrs:
                new_ds[k] = v
            else:
                display.warning("Ignoring invalid attribute: %s" % k)

    return super(Task, self).preprocess_data(new_ds)

def construct_mapping(self, node, deep=False):
    ret = AnsibleMapping(super(Constructor, self).construct_mapping(node, deep))
    ret._line_number = node.__line__
    ret._column_number = node.__column__
    ret._data_source = node.__datasource__
    return ret