def ensure_xml_or_str(data, field):
    """Return *data* unchanged together with a detected type tag.

    :param data: value to inspect; may be falsy, a string, or a dict
    :param field: field name used in error messages
    :returns: tuple ``(result, tipe)`` where ``tipe`` is ``None`` (empty
        input), ``"str"``, or ``"xml"``
    :raises AnsibleModuleError: when *data* looks like XML but is invalid,
        or is a dict that cannot be converted to XML
    """
    if not data:
        tipe = None
    else:
        tipe = "str"
    result = data
    if isinstance(data, str) and re.match(r"^<.+>$", data):
        try:
            # Bug fix: validate the actual payload; the original passed the
            # builtin ``filter`` here, so every XML-looking string fell
            # through to the generic except clause and raised.
            fromstring(data)
            tipe = "xml"
        except XMLSyntaxError:
            # Looks like XML but is not well-formed; keep treating it as str.
            pass
        except Exception as exc:
            error = "'{field}' recognized as XML but was not valid. ".format(
                field=field)
            raise AnsibleModuleError(error + to_native(exc))
    if isinstance(data, dict):
        if not HAS_XMLTODICT:
            msg = "{field} was provided as a dictionary, conversion to XML requires 'xmltodict'. ".format(
                field=field)
            raise AnsibleModuleError(msg + missing_required_lib("xmltodict"))
        try:
            # Render the dict as an XML fragment (no <?xml?> declaration).
            result = xmltodict.unparse(data, full_document=False)
            tipe = "xml"
        except Exception as exc:
            error = "'{field}' was dictionary but conversion to XML failed. ".format(
                field=field)
            raise AnsibleModuleError(error + to_native(exc))
    return result, tipe
def run(self, tmp=None, task_vars=None):
    '''Handler for cli operations: load a role spec file and validate
    matching task vars through AnsibleModule.

    :param tmp: unused, kept for the base-class signature
    :param task_vars: task variables; the spec's keys (or their aliases)
        are looked up here
    :returns: the base action result dict
    :raises AnsibleModuleError: when 'spec' is missing/empty
    '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    try:
        spec = self._task.args['spec']
    except KeyError as exc:
        raise AnsibleModuleError(to_text(exc))
    if not spec:
        raise AnsibleModuleError('missing required argument: spec')
    # Role argument specs live under the role's meta/ directory.
    spec_fp = os.path.join(task_vars['role_path'], 'meta/%s' % spec)
    display.vvv('using role spec %s' % spec_fp)
    spec = self._loader.load_from_file(spec_fp)
    argument_spec = spec.get('argument_spec') or {}
    args = {}
    for key, attrs in iteritems(argument_spec):
        if attrs is None:
            # Bare keys in the spec default to plain strings.
            argument_spec[key] = {'type': 'str'}
        if key in task_vars:
            if isinstance(task_vars[key], string_types):
                value = self._templar.do_template(task_vars[key])
                if value:
                    args[key] = value
            else:
                args[key] = task_vars[key]
        elif attrs:
            if 'aliases' in attrs:
                for item in attrs['aliases']:
                    if item in task_vars:
                        # Bug fix: resolve the alias variable itself; the
                        # original templated task_vars[key], which is known
                        # to be absent in this branch (KeyError).
                        args[key] = self._templar.do_template(
                            task_vars[item])
            elif 'default' in attrs and key not in args:
                args[key] = attrs['default']
    # Feed the collected args through AnsibleModule for validation.
    basic._ANSIBLE_ARGS = to_bytes(
        json.dumps({'ANSIBLE_MODULE_ARGS': args}))
    basic.AnsibleModule.fail_json = self.fail_json
    spec = dict([(k, v) for k, v in iteritems(spec)
                 if k in self.VALID_MODULE_KWARGS])
    basic.AnsibleModule(**spec)
    self._remove_tmp_path(self._connection._shell.tmpdir)
    return result
def xml_to_native(obj, field, full_doc=False, pretty=False):
    """Parse the XML string *obj* into native Python data via xmltodict.

    :param obj: XML document/fragment as a string
    :param field: field name used in error messages
    :param full_doc: unused here; kept for interface compatibility
    :param pretty: unused here; kept for interface compatibility
    :returns: the parsed data as plain dicts
    :raises AnsibleModuleError: if xmltodict is missing or parsing fails
    """
    if not HAS_XMLTODICT:
        # Bug fix: closed the unbalanced quote around 'native' in the
        # original message text.
        msg = "{field} was set to 'native', conversion from XML requires 'xmltodict'. ".format(
            field=field)
        raise AnsibleModuleError(msg + missing_required_lib("xmltodict"))
    try:
        # dict_constructor=dict avoids xmltodict's default OrderedDict.
        return xmltodict.parse(obj, dict_constructor=dict)
    except Exception as exc:
        error = "'xmltodict' returned the following error when converting {field} from XML. ".format(
            field=field)
        raise AnsibleModuleError(error + to_native(exc))
def _check_result(self, module_name, run_on_ca_host, result=None, ignore_changed=False, ignore_failed=False): if not ignore_failed and 'failed' in result and result['failed']: if self._task_vars['ansible_verbosity'] > 2: result['action_stack'] = ''.join(traceback.format_stack()) result['module_msg'] = "No message" if 'msg' in result: result['module_msg'] = result['msg'] elif 'message' in result: result['module_msg'] = result['message'] host_type = "target host" if run_on_ca_host: host_type = "ca host" result['msg'] = "Error during {0} module execution on {1}".format( module_name, host_type) if 'ansible_delegated_vars' in result: del result['ansible_delegated_vars'] if '_ansible_delegated_vars' in result: del result['_ansible_delegated_vars'] raise AnsibleModuleError(result['msg'], orig_exc=result) if not ignore_changed and 'changed' in result and result['changed']: self._changed = True
def run(self, terms, variables=None, **kwargs):
    """Return the PEM text of each private-key file named in *terms*.

    Lookups take a list of terms and return a list of results so they
    compose with the 'with_' looping constructs.

    :param terms: file names to resolve through the plugin search path
    :param variables: ansible variables used for path resolution
    :param kwargs: may carry 'passphrase' for encrypted keys
    :raises AnsibleError: when a term cannot be located
    """
    if not PYOPENSSL_FOUND:
        raise AnsibleModuleError('ssl_key_text plugin requires pyOpenSSL')

    # An empty passphrase means "no passphrase".
    passphrase = kwargs.get('passphrase', None)
    if passphrase == '':
        passphrase = None

    results = []
    for term in terms:
        display.debug("key lookup term: %s" % term)
        # Resolve the file via the standard plugin search path helper.
        found = self.find_file_in_search_path(variables, 'files', term)
        # The display class handles all output in a unified way; no print.
        display.vvvv(u"key lookup using %s as file" % found)
        try:
            if not found:
                # AnsibleParserError signals invalid options; translated
                # into a final AnsibleError below so the engine can
                # handle it.
                raise AnsibleParserError()
            key_obj = crypto_utils.load_privatekey(found, passphrase)
            pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, key_obj)
            results.append(to_text(pem))
        except AnsibleParserError:
            raise AnsibleError("could not locate file in lookup: %s" % term)
    return results
def _assert_if_type_mismatch(obj, type_=str): """ :param obj: object to test type :param type_: expected type """ if not isinstance(obj, type_): raise AnsibleModuleError("The arguments expect %r" % type_)
def run(self, tmp=None, task_vars=None):
    ''' handler for cli operations '''
    if task_vars is None:
        task_vars = dict()
    result = super(ActionModule, self).run(tmp, task_vars)
    del tmp  # tmp no longer has any effect

    try:
        spec_name = self._task.args['spec']
    except KeyError as exc:
        raise AnsibleModuleError(to_text(exc))
    if not spec_name:
        raise AnsibleModuleError('missing required argument: spec')

    # Role argument specs live under the role's meta/ directory.
    spec_path = os.path.join(task_vars['role_path'], 'meta/%s' % spec_name)
    display.vvv('using role spec %s' % spec_path)
    spec = self._loader.load_from_file(spec_path)

    if 'argument_spec' not in spec:
        return {
            'failed': True,
            'msg': 'missing required field in specification file: argument_spec'
        }

    # Collect the task vars that satisfy the argument spec.
    args = {}
    self._handle_options(task_vars, args, spec['argument_spec'])

    # Feed the collected args through AnsibleModule for validation.
    basic._ANSIBLE_ARGS = to_bytes(
        json.dumps({'ANSIBLE_MODULE_ARGS': args}))
    basic.AnsibleModule.fail_json = self.fail_json
    module_kwargs = dict([(k, v) for k, v in iteritems(spec)
                          if k in self.VALID_MODULE_KWARGS])
    validated_spec = basic.AnsibleModule(**module_kwargs)

    result['role_params'] = validated_spec.params
    result['changed'] = False
    self._remove_tmp_path(self._connection._shell.tmpdir)
    return result
def map_enum_map(val):
    """Translate *val* through the enclosing ``mapping`` table.

    :param val: enum value to look up
    :raises AnsibleModuleError: when *val* has no (non-None) entry
    """
    mapped = mapping.get(val, None)
    if mapped is not None:
        return mapped
    # NOTE(review): the format placeholders print mapping_id as the
    # "value" and val as the mapping name -- looks swapped; confirm intent.
    raise AnsibleModuleError(
        "Unknown enum mapping value '{}' for '{}':"\
        " {}".format(mapping_id, val, mapping)
    )
def has_mode(path, mode='0644'):
    """Return True if the target's permission digits match *mode*.

    :param path: File or dir path
    :param mode: Expected mode of the target, e.g. '755', '1644'
    :returns: True when the trailing permission digits equal *mode*
    :raises AnsibleFileNotFound: if *path* does not exist
    :raises AnsibleModuleError: if *mode* is not 3-4 digits
    """
    _assert_if_type_mismatch(path)
    _assert_if_type_mismatch(mode)

    if not os.path.exists(path):
        raise AnsibleFileNotFound("Does not exist: {}".format(repr(path)))

    # Bug fix: the old default '0o644' could never satisfy this pattern,
    # so calling has_mode(path) with the default mode always raised.
    if not re.match(r"^\d?\d{3}$", mode):
        raise AnsibleModuleError("mode must be in the form [0-9]+: "
                                 "{}".format(mode))

    # oct() yields e.g. '0o100644'; strip the prefix and compare only the
    # trailing len(mode) digits so '644' matches '100644'.
    _mode = oct(os.stat(path).st_mode).replace('0o', '')
    return mode == _mode[len(_mode) - len(mode):]
def exec_powershell_script(self, finalcmd, preprocess=None, extra_psargs=None,
      keyfilter=None, keys_exclude=False, data_return=False,
      expect_min=None, expect_max=None, force_unlist=False,
      cmd_exe=False, **kwargs):
    """Run a powershell script on the target via win_command.

    :param finalcmd: the powershell command/pipeline to execute
    :param preprocess: optional script text prepended before finalcmd
    :param extra_psargs: extra module args merged into the win_command call
    :param keyfilter: first-level keys to include (or, with keys_exclude,
        to drop) from each returned json object
    :param keys_exclude: when True, keyfilter lists keys to remove
    :param data_return: when True, pipe output through ConvertTo-Json and
        attach the parsed result as res['result_json']
    :param expect_min: minimum number of returned items, else error
    :param expect_max: maximum number of returned items, else error
    :param force_unlist: unwrap a single-element result list
    :param cmd_exe: run the (one-line) script through ``cmd /c``
    :param kwargs: passed through to exec_module
    :returns: the win_command result dict, with 'result_json' added when
        data_return is set
    :raises AnsibleModuleError: on parameter misuse or unexpected counts
    """
    script = preprocess or ''
    if script:
        script += '\n'
    # make sure to return result as json
    if data_return:
        script += "{} | ConvertTo-Json".format(finalcmd)
    else:
        script += "{}".format(finalcmd)
    if cmd_exe:
        # cmd /c cannot carry a multi-line script.
        if len(script.split('\n')) > 1:
            raise AnsibleModuleError(
                "cmd_exe psmode can only be used for oneliners")
        script = 'cmd /c ' + script
    modargs = extra_psargs or {}
    ## note: _raw_params is the magic internal key for free_form args
    # The script is delivered on stdin of 'powershell.exe -'.
    modargs.update(stdin=script, _raw_params='powershell.exe -')
    res = self.exec_module('ansible.windows.win_command',
                           modargs=modargs, **kwargs)
    if not data_return:
        return res
    ## note: normally, a subcall failing will immediately abort this
    ## plugin, but it is possible, that a subcall failure is deemed
    ## acceptable for specific use cases, if that's occure, just
    ## return the raw result
    tmp = res['stdout']
    if not tmp:
        if expect_min:
            raise AnsibleModuleError(
                "Bad powershell script call '{}': Expected at least"\
                " '{}' items returned, but result was empty".format(
                    finalcmd, expect_min
                )
            )
        res['result_json'] = {}
        return res
    ## on success we expect stdout to contain valid json result object
    tmp = json.loads(tmp)
    delist = False
    display.vvv(
        "[ACTION_PLUGIN_WIN] :: execute powershell script :: raw"\
        " jsoned results: {}".format(tmp)
    )
    display.vvv(
        "[ACTION_PLUGIN_WIN] :: execute powershell script ::"\
        " json key filter ({1}): {0}".format(keyfilter,
            'exclude' if keys_exclude else 'include'
        )
    )
    # Normalize a single json object to a one-element list so filtering
    # below treats both shapes uniformly; remember to unwrap at the end.
    if not isinstance(tmp, list):
        delist = True
        tmp = [tmp]
    if expect_min:
        if expect_max and expect_min > expect_max:
            raise AnsibleModuleError(
                "Bad powershell script call '{}': Invalid"\
                " parameters, expect_min ('{}') cannot be greater"\
                " than expect_max('{}')".format(
                    finalcmd, expect_min, expect_max
                )
            )
        if len(tmp) < expect_min:
            raise AnsibleModuleError(
                "Bad powershell script call '{}': Expected at least"\
                " '{}' items returned, but got only '{}'".format(
                    finalcmd, expect_min, len(tmp)
                )
            )
    # NOTE(review): the source formatting is collapsed; this upper-bound
    # check is placed outside the expect_min branch on the assumption
    # that expect_max should apply on its own too -- confirm.
    if expect_max and len(tmp) > expect_max:
        raise AnsibleModuleError(
            "Bad powershell script call '{}': Expected at most '{}'"\
            " items returned, but got '{}'".format(
                finalcmd, expect_max, len(tmp)
            )
        )
    ## optionally filter out keys on first level of
    ## returned dict(s) / json object(s)
    for jo in tmp:
        new_jo = {}
        for k in (keyfilter or []):
            if keys_exclude:
                jo.pop(k, None)
            elif k in jo:
                new_jo[k] = jo[k]
        # Include-mode filtering rewrites the dict in place so callers
        # holding a reference to res still see the filtered data.
        if new_jo:
            jo.clear()
            jo.update(new_jo)
    if delist or force_unlist:
        if len(tmp) != 1:
            raise AnsibleModuleError(
                "Bad powershell script call '{}': Delisting json"\
                " result is only allowed for single element lists,"\
                " but got '{}' items: {}".format(finalcmd, len(tmp), tmp)
            )
        tmp = tmp[0]
    res['result_json'] = tmp
    display.vvv(
        "[ACTION_PLUGIN_WIN] :: execute powershell script ::"\
        " final json results: {}".format(tmp)
    )
    return res
def fail_json(self, msg):
    # Replacement for AnsibleModule.fail_json that raises instead of
    # printing JSON and exiting, so the action plugin can handle the
    # failure itself.
    #
    # :param msg: failure message carried in the exception
    # :raises AnsibleModuleError: always
    raise AnsibleModuleError(msg)
def _fail_json(self, msg):
    """Raise an AnsibleModuleError for *msg*, rewriting the generic
    '(basic.py)' marker to the current task's action name so the error
    points at the real module.

    :param msg: failure message from AnsibleModule
    :raises AnsibleModuleError: always
    """
    rewritten = msg.replace('(basic.py)', self._task.action)
    raise AnsibleModuleError(rewritten)