def _exec_module(self, module):
    """Exec the module's main().

    Since modules print their result, we need to replace stdout with a
    buffer. If main() fails, we capture the exception as stderr. Once we
    collect stdout/stderr, use our super's _parse_returned_data() to
    JSON-load it or handle a traceback.

    :param module: A loaded module
    :type module: A module file that was loaded
    :return module_result: The result of the module
    :rtype module_result: dict
    """
    import io
    import sys

    from ansible.module_utils._text import to_native
    from ansible.vars.clean import remove_internal_keys

    # preserve previous stdout, replace with buffer
    sys_stdout = sys.stdout
    sys.stdout = io.StringIO()

    # run the module, catch the SystemExit so we continue
    # (module.main() is expected to exit via exit_json()/fail_json(), which raise SystemExit)
    try:
        module.main()
    except SystemExit:
        # module exited cleanly
        stdout = sys.stdout.getvalue()
        stderr = ""
    except Exception as exc:
        # dirty module or connection traceback
        stderr = to_native(exc)
        stdout = ""

    # restore stdout
    sys.stdout = sys_stdout

    # parse the response
    dict_out = {
        "stdout": stdout,
        "stdout_lines": stdout.splitlines(),
        "stderr": stderr,
        "stderr_lines": stderr.splitlines(),
    }
    data = self._parse_returned_data(dict_out)

    # clean up the response like action's _execute_module
    remove_internal_keys(data)

    # split stdout/stderr into lines if needed
    if "stdout" in data and "stdout_lines" not in data:
        # if the value is 'False', a default won't catch it.
        txt = data.get("stdout", None) or ""
        data["stdout_lines"] = txt.splitlines()
    if "stderr" in data and "stderr_lines" not in data:
        # if the value is 'False', a default won't catch it.
        txt = data.get("stderr", None) or ""
        data["stderr_lines"] = txt.splitlines()

    return data
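A minimal standalone sketch of the stdout-capture pattern used above: the module's main() is expected to print a JSON result and exit via SystemExit, so stdout is swapped for a StringIO buffer, main() is run, and whatever it printed is parsed. The names run_and_capture and fake_main are illustrative stand-ins, not part of Ansible.

import io
import json
import sys


def run_and_capture(main_func):
    """Run main_func() with stdout redirected to a buffer and return its parsed JSON output."""
    saved_stdout = sys.stdout
    sys.stdout = io.StringIO()
    try:
        main_func()
    except SystemExit:
        pass  # Ansible modules exit via exit_json()/fail_json(), which raise SystemExit
    finally:
        captured = sys.stdout.getvalue()
        sys.stdout = saved_stdout
    return json.loads(captured)


def fake_main():
    # stand-in for an Ansible module's main(): print a result dict and exit
    print(json.dumps({"changed": False, "msg": "ok"}))
    raise SystemExit(0)


print(run_and_capture(fake_main))  # -> {'changed': False, 'msg': 'ok'}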
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None,
                    persist_files=False, delete_remote_tmp=True, wrap_async=False):
    '''
    Transfer and run a module along with its arguments.
    '''
    if task_vars is None:
        task_vars = dict()

    remote_module_path = None
    args_file_path = None
    remote_files = []

    # if a module name was not specified for this execution, use the action from the task
    if module_name is None:
        module_name = self._task.action
    if module_args is None:
        module_args = self._task.args

    self._update_module_args(module_name, module_args, task_vars)

    # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
    (module_style, shebang, module_data, module_path) = self._configure_module(
        module_name=module_name, module_args=module_args, task_vars=task_vars)
    display.vvv("Using module file %s" % module_path)

    if not shebang and module_style != 'binary':
        raise AnsibleError("module (%s) is missing interpreter line" % module_name)

    if not self._is_pipelining_enabled(module_style, wrap_async):
        # we might need remote tmp dir
        if not tmp or 'tmp' not in tmp:
            tmp = self._make_tmp_path()

        remote_module_filename = self._connection._shell.get_remote_filename(module_path)
        remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)

    if module_style in ('old', 'non_native_want_json', 'binary'):
        # we'll also need a temp file to hold our module arguments
        args_file_path = self._connection._shell.join_path(tmp, 'args')

    if remote_module_path or module_style != 'new':
        display.debug("transferring module to remote %s" % remote_module_path)
        if module_style == 'binary':
            self._transfer_file(module_path, remote_module_path)
        else:
            self._transfer_data(remote_module_path, module_data)

        if module_style == 'old':
            # we need to dump the module args to a k=v string in a file on
            # the remote system, which can be read and parsed by the module
            args_data = ""
            for k, v in iteritems(module_args):
                args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
            self._transfer_data(args_file_path, args_data)
        elif module_style in ('non_native_want_json', 'binary'):
            self._transfer_data(args_file_path, json.dumps(module_args))
        display.debug("done transferring module to remote")

    environment_string = self._compute_environment_string()

    if tmp and remote_module_path:
        remote_files = [tmp, remote_module_path]

    if args_file_path:
        remote_files.append(args_file_path)

    sudoable = True
    in_data = None
    cmd = ""

    if wrap_async and not self._connection.always_pipeline_modules:
        # configure, upload, and chmod the async_wrapper module
        (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(
            module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
        async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
        remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)

        self._transfer_data(remote_async_module_path, async_module_data)
        remote_files.append(remote_async_module_path)

        async_limit = self._task.async_val
        async_jid = str(random.randint(0, 999999999999))

        # call the interpreter for async_wrapper directly
        # this permits use of a script for an interpreter on non-Linux platforms
        # TODO: re-implement async_wrapper as a regular module to avoid this special case
        interpreter = shebang.replace('#!', '').strip()
        async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]

        if environment_string:
            async_cmd.insert(0, environment_string)

        if args_file_path:
            async_cmd.append(args_file_path)
        else:
            # maintain a fixed number of positional parameters for async_wrapper
            async_cmd.append('_')

        if not self._should_remove_tmp_path(tmp):
            async_cmd.append("-preserve_tmp")

        cmd = " ".join(to_text(x) for x in async_cmd)
    else:
        if self._is_pipelining_enabled(module_style):
            in_data = module_data
        else:
            cmd = remote_module_path

        rm_tmp = None
        if self._should_remove_tmp_path(tmp) and not persist_files and delete_remote_tmp:
            if not self._play_context.become or self._play_context.become_user == 'root':
                # not sudoing or sudoing to root, so can cleanup files in the same step
                rm_tmp = tmp

        cmd = self._connection._shell.build_module_command(
            environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp).strip()

    # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
    if remote_files:
        # remove none/empty
        remote_files = [x for x in remote_files if x]
        self._fixup_perms2(remote_files, self._play_context.remote_user)

    # actually execute
    res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

    # parse the main result
    data = self._parse_returned_data(res)

    # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
    # get internal info before cleaning
    tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)

    # remove internal keys
    remove_internal_keys(data)

    # cleanup tmp?
    if ((self._play_context.become and self._play_context.become_user != 'root')
            and not persist_files and delete_remote_tmp or tmpdir_delete):
        self._remove_tmp_path(tmp)

    # FIXME: for backwards compat, figure out if still makes sense
    if wrap_async:
        data['changed'] = True

    # pre-split stdout/stderr into lines if needed
    if 'stdout' in data and 'stdout_lines' not in data:
        # if the value is 'False', a default won't catch it.
        txt = data.get('stdout', None) or u''
        data['stdout_lines'] = txt.splitlines()
    if 'stderr' in data and 'stderr_lines' not in data:
        # if the value is 'False', a default won't catch it.
        txt = data.get('stderr', None) or u''
        data['stderr_lines'] = txt.splitlines()

    display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
    return data
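The line-splitting fallback at the end of the function above uses `data.get('stdout', None) or u''` rather than a plain dict default because a stored value of False (or None) would slip past `.get('stdout', '')` and then fail on `.splitlines()`. A small illustration, independent of Ansible; the data dict is made up for the example.

data = {"stdout": False, "stderr": "warning: something\nerror: else"}

for key in ("stdout", "stderr"):
    if key in data and key + "_lines" not in data:
        # if the value is False (or None), a plain .get() default won't catch it
        txt = data.get(key, None) or ""
        data[key + "_lines"] = txt.splitlines()

print(data["stdout_lines"])  # -> []
print(data["stderr_lines"])  # -> ['warning: something', 'error: else']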
def _execute_module(self, module_name=None, module_args=None, tmp=None,
                    task_vars=None, persist_files=False,
                    delete_remote_tmp=True, wrap_async=False):
    """
    Collect up a module's execution environment then use it to invoke
    target.run_module() or helpers.run_module_async() in the target
    context.
    """
    if module_name is None:
        module_name = self._task.action
    if module_args is None:
        module_args = self._task.args
    if task_vars is None:
        task_vars = {}

    self._update_module_args(module_name, module_args, task_vars)
    env = {}
    self._compute_environment_string(env)
    self._set_temp_file_args(module_args, wrap_async)

    # there's a case where if a task shuts down the node and then immediately calls
    # wait_for_connection, the `ping` test from Ansible won't pass because we lost connection
    # clearing out context forces a reconnect
    # see https://github.com/dw/mitogen/issues/655 and Ansible's `wait_for_connection` module for more info
    if module_name == 'ansible.legacy.ping' and type(self).__name__ == 'wait_for_connection':
        self._connection.context = None

    self._connection._connect()
    result = ansible_mitogen.planner.invoke(
        ansible_mitogen.planner.Invocation(
            action=self,
            connection=self._connection,
            module_name=mitogen.core.to_text(module_name),
            module_args=mitogen.utils.cast(module_args),
            task_vars=task_vars,
            templar=self._templar,
            env=mitogen.utils.cast(env),
            wrap_async=wrap_async,
            timeout_secs=self.get_task_timeout_secs(),
        )
    )

    if tmp and delete_remote_tmp and ansible_mitogen.utils.ansible_version[:2] < (2, 5):
        # Built-in actions expected tmpdir to be cleaned up automatically
        # on _execute_module().
        self._remove_tmp_path(tmp)

    # prevents things like discovered_interpreter_* or ansible_discovered_interpreter_* from being set
    # handle ansible 2.3.3 that has remove_internal_keys in a different place
    check = remove_internal_keys(result)
    if check == 'Not found':
        self._remove_internal_keys(result)

    # taken from _execute_module of ansible 2.8.6
    # propagate interpreter discovery results back to the controller
    if self._discovered_interpreter_key:
        if result.get('ansible_facts') is None:
            result['ansible_facts'] = {}
        # only cache discovered_interpreter if we're not running a rediscovery
        # rediscovery happens in places like docker connections that could have different
        # python interpreters than the main host
        if not self._rediscovered_python:
            result['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter

    if self._discovery_warnings:
        if result.get('warnings') is None:
            result['warnings'] = []
        result['warnings'].extend(self._discovery_warnings)

    if self._discovery_deprecation_warnings:
        if result.get('deprecations') is None:
            result['deprecations'] = []
        result['deprecations'].extend(self._discovery_deprecation_warnings)

    return wrap_var(result)
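The guards above (`if result.get('warnings') is None: result['warnings'] = []`) behave differently from dict.setdefault(): setdefault keeps an existing value even when that value is an explicit None, which would then break the following .extend() call. A quick standalone illustration of the difference; the result dict is made up for the example.

result = {"warnings": None}

# setdefault leaves the explicit None in place
result.setdefault("warnings", [])
print(result["warnings"])  # -> None, so .extend() would raise AttributeError

# the is-None guard normalizes missing *and* None values to a list first
if result.get("warnings") is None:
    result["warnings"] = []
result["warnings"].extend(["a warning"])
print(result["warnings"])  # -> ['a warning']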
def _execute_module(self, module_name=None, module_args=None, tmp=None,
                    task_vars=None, persist_files=False,
                    delete_remote_tmp=True, wrap_async=False):
    """
    Collect up a module's execution environment then use it to invoke
    target.run_module() or helpers.run_module_async() in the target
    context.
    """
    if module_name is None:
        module_name = self._task.action
    if module_args is None:
        module_args = self._task.args
    if task_vars is None:
        task_vars = {}

    self._update_module_args(module_name, module_args, task_vars)
    env = {}
    self._compute_environment_string(env)
    self._set_temp_file_args(module_args, wrap_async)

    self._connection._connect()
    result = ansible_mitogen.planner.invoke(
        ansible_mitogen.planner.Invocation(
            action=self,
            connection=self._connection,
            module_name=mitogen.core.to_text(module_name),
            module_args=mitogen.utils.cast(module_args),
            task_vars=task_vars,
            templar=self._templar,
            env=mitogen.utils.cast(env),
            wrap_async=wrap_async,
            timeout_secs=self.get_task_timeout_secs(),
        )
    )

    if tmp and ansible.__version__ < '2.5' and delete_remote_tmp:
        # Built-in actions expected tmpdir to be cleaned up automatically
        # on _execute_module().
        self._remove_tmp_path(tmp)

    # prevents things like discovered_interpreter_* or ansible_discovered_interpreter_* from being set
    # handle ansible 2.3.3 that has remove_internal_keys in a different place
    check = remove_internal_keys(result)
    if check == 'Not found':
        self._remove_internal_keys(result)

    # taken from _execute_module of ansible 2.8.6
    # propagate interpreter discovery results back to the controller
    if self._discovered_interpreter_key:
        if result.get('ansible_facts') is None:
            result['ansible_facts'] = {}
        # only cache discovered_interpreter if we're not running a rediscovery
        # rediscovery happens in places like docker connections that could have different
        # python interpreters than the main host
        if not self._rediscovered_python:
            result['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter

    if self._discovery_warnings:
        if result.get('warnings') is None:
            result['warnings'] = []
        result['warnings'].extend(self._discovery_warnings)

    if self._discovery_deprecation_warnings:
        if result.get('deprecations') is None:
            result['deprecations'] = []
        result['deprecations'].extend(self._discovery_deprecation_warnings)

    return wrap_var(result)
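The two variants above gate the pre-2.5 tmpdir cleanup differently: the first compares a parsed version tuple (`ansible_version[:2] < (2, 5)`), the second compares the raw version string (`ansible.__version__ < '2.5'`). String comparison is lexicographic and misorders double-digit minor versions. A quick sketch of the difference; version_tuple is an illustrative helper, not an Ansible or Mitogen API.

def version_tuple(version_string):
    """Parse the leading two numeric components of a version string into a tuple."""
    return tuple(int(part) for part in version_string.split('.')[:2])


print('2.10' < '2.5')                                # -> True  (lexicographic, wrong)
print(version_tuple('2.10') < version_tuple('2.5'))  # -> False (numeric, correct)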
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None,
                    persist_files=False, delete_remote_tmp=None, wrap_async=False):
    '''
    Transfer and run a module along with its arguments.
    '''
    if tmp is not None:
        display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
                        ' should set self._connection._shell.tmpdir to share the tmpdir')
    del tmp  # No longer used
    if delete_remote_tmp is not None:
        display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
                        ' Action plugins should check self._connection._shell.tmpdir to'
                        ' see if a tmpdir existed before they were called to determine'
                        ' if they are responsible for removing it.')
    del delete_remote_tmp  # No longer used

    if task_vars is None:
        task_vars = dict()

    # if a module name was not specified for this execution, use the action from the task
    if module_name is None:
        module_name = self._task.action
    if module_args is None:
        module_args = self._task.args

    self._update_module_args(module_name, module_args, task_vars)

    # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
    (module_style, shebang, module_data, module_path) = self._configure_module(
        module_name=module_name, module_args=module_args, task_vars=task_vars)
    display.vvv("Using module file %s" % module_path)

    if not shebang and module_style != 'binary':
        raise AnsibleError("module (%s) is missing interpreter line" % module_name)

    tmpdir = self._connection._shell.tmpdir
    remote_module_path = None

    if not self._is_pipelining_enabled(module_style, wrap_async):
        # we might need remote tmp dir
        if tmpdir is None:
            self._make_tmp_path()
            tmpdir = self._connection._shell.tmpdir

        remote_module_filename = self._connection._shell.get_remote_filename(module_path)
        remote_module_path = self._connection._shell.join_path(tmpdir, remote_module_filename)

    args_file_path = None
    if module_style in ('old', 'non_native_want_json', 'binary'):
        # we'll also need a tmp file to hold our module arguments
        args_file_path = self._connection._shell.join_path(tmpdir, 'args')

    if remote_module_path or module_style != 'new':
        display.debug("transferring module to remote %s" % remote_module_path)
        if module_style == 'binary':
            self._transfer_file(module_path, remote_module_path)
        else:
            self._transfer_data(remote_module_path, module_data)

        if module_style == 'old':
            # we need to dump the module args to a k=v string in a file on
            # the remote system, which can be read and parsed by the module
            args_data = ""
            for k, v in iteritems(module_args):
                args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
            self._transfer_data(args_file_path, args_data)
        elif module_style in ('non_native_want_json', 'binary'):
            self._transfer_data(args_file_path, json.dumps(module_args))
        display.debug("done transferring module to remote")

    environment_string = self._compute_environment_string()

    remote_files = []
    if tmpdir and remote_module_path:
        remote_files = [tmpdir, remote_module_path]

    if args_file_path:
        remote_files.append(args_file_path)

    sudoable = True
    in_data = None
    cmd = ""

    if wrap_async and not self._connection.always_pipeline_modules:
        # configure, upload, and chmod the async_wrapper module
        (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(
            module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
        async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
        remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)

        self._transfer_data(remote_async_module_path, async_module_data)
        remote_files.append(remote_async_module_path)

        async_limit = self._task.async_val
        async_jid = str(random.randint(0, 999999999999))

        # call the interpreter for async_wrapper directly
        # this permits use of a script for an interpreter on non-Linux platforms
        # TODO: re-implement async_wrapper as a regular module to avoid this special case
        interpreter = shebang.replace('#!', '').strip()
        async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]

        if environment_string:
            async_cmd.insert(0, environment_string)

        if args_file_path:
            async_cmd.append(args_file_path)
        else:
            # maintain a fixed number of positional parameters for async_wrapper
            async_cmd.append('_')

        if not self._should_remove_tmp_path(tmpdir):
            async_cmd.append("-preserve_tmp")

        cmd = " ".join(to_text(x) for x in async_cmd)
    else:
        if self._is_pipelining_enabled(module_style):
            in_data = module_data
        else:
            cmd = remote_module_path

        cmd = self._connection._shell.build_module_command(
            environment_string, shebang, cmd, arg_path=args_file_path).strip()

    # Fix permissions of the tmpdir path and tmpdir files. This should be called after all
    # files have been transferred.
    if remote_files:
        # remove none/empty
        remote_files = [x for x in remote_files if x]
        self._fixup_perms2(remote_files, self._play_context.remote_user)

    # actually execute
    res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

    # parse the main result
    data = self._parse_returned_data(res)

    # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
    # get internal info before cleaning
    if data.pop("_ansible_suppress_tmpdir_delete", False):
        self._cleanup_remote_tmp = False

    # remove internal keys
    remove_internal_keys(data)

    if wrap_async:
        # async_wrapper will clean up its tmpdir on its own so we want the controller side to
        # forget about it now
        self._connection._shell.tmpdir = None

        # FIXME: for backwards compat, figure out if still makes sense
        data['changed'] = True

    # pre-split stdout/stderr into lines if needed
    if 'stdout' in data and 'stdout_lines' not in data:
        # if the value is 'False', a default won't catch it.
        txt = data.get('stdout', None) or u''
        data['stdout_lines'] = txt.splitlines()
    if 'stderr' in data and 'stderr_lines' not in data:
        # if the value is 'False', a default won't catch it.
        txt = data.get('stderr', None) or u''
        data['stderr_lines'] = txt.splitlines()

    display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
    return data
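For reference, the two argument encodings transferred in the function above can be reproduced with the standard library alone: 'old'-style modules read a shell-quoted k=v string, while 'non_native_want_json' and 'binary' modules read a JSON document. A small sketch; module_args and its values are made up for the example, and shlex.quote/str stand in for the six helpers (shlex_quote, text_type) used in the original.

import json
from shlex import quote as shlex_quote

module_args = {"path": "/tmp/my file", "state": "present"}

# 'old' style: space-separated key=value pairs, values shell-quoted
args_data = ""
for k, v in module_args.items():
    args_data += '%s=%s ' % (k, shlex_quote(str(v)))
print(args_data)            # -> path='/tmp/my file' state=present

# 'non_native_want_json' / 'binary' style: a single JSON document
print(json.dumps(module_args))  # -> {"path": "/tmp/my file", "state": "present"}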