Example 1
def run_command(
    cmd,
    stdin=None,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    shell=False,
    cwd=None,
    env=None,
):
    """
    Run the provided command in a subprocess and wait until it completes.

    :param cmd: Command to run.
    :type cmd: ``str`` or ``list``

    :param stdin: Process stdin.
    :type stdin: ``object``

    :param stdout: Process stdout.
    :type stdout: ``object``

    :param stderr: Process stderr.
    :type stderr: ``object``

    :param shell: True to use a shell.
    :type shell: ``boolean``

    :param cwd: Optional working directory.
    :type cwd: ``str``

    :param env: Optional environment to use with the command. If not provided,
                environment from the current process is inherited.
    :type env: ``dict``

    :rtype: ``tuple`` (exit_code, stdout, stderr)
    """
    if not isinstance(cmd, (list, tuple) + six.string_types):
        raise TypeError(
            f"Command must be a type of list, tuple, or string, not '{type(cmd)}'."
        )

    if not env:
        env = os.environ.copy()

    process = concurrency.subprocess_popen(
        args=cmd,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        env=env,
        cwd=cwd,
        shell=shell,
    )
    stdout, stderr = process.communicate()
    exit_code = process.returncode

    if six.PY3:
        return (exit_code, stdout.decode(), stderr.decode())
    else:
        return (exit_code, stdout, stderr)
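A minimal usage sketch for the variant above. The import path (st2common.util.shell) is an assumption about where this excerpt lives in the StackStorm tree; the call pattern itself follows the signature shown.

# Hypothetical usage of run_command as defined above; the import path is an assumption.
from st2common.util.shell import run_command

# String command executed through a shell, inheriting the current process environment.
exit_code, stdout, stderr = run_command(cmd="ls -la /tmp", shell=True)

# List command executed without a shell; on Python 3 stdout/stderr are decoded to str.
exit_code, stdout, stderr = run_command(cmd=["echo", "hello"])

if exit_code != 0:
    raise RuntimeError("Command failed: %s" % (stderr))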
Example 2
File: shell.py Project: r0m4n-z/st2
def run_command(
    cmd,
    stdin=None,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    shell=False,
    cwd=None,
    env=None,
    timeout=60,
    preexec_func=None,
    kill_func=None,
    read_stdout_func=None,
    read_stderr_func=None,
    read_stdout_buffer=None,
    read_stderr_buffer=None,
    stdin_value=None,
    bufsize=0,
):
    """
    Run the provided command in a subprocess and wait until it completes.

    :param cmd: Command to run.
    :type cmd: ``str`` or ``list``

    :param stdin: Process stdin.
    :type stdin: ``object``

    :param stdout: Process stdout.
    :type stdout: ``object``

    :param stderr: Process stderr.
    :type stderr: ``object``

    :param shell: True to use a shell.
    :type shell: ``boolean``

    :param cwd: Optional working directory.
    :type cwd: ``str``

    :param env: Optional environment to use with the command. If not provided,
                environment from the current process is inherited.
    :type env: ``dict``

    :param timeout: How long to wait before timing out.
    :type timeout: ``float``

    :param preexec_func: Optional pre-exec function.
    :type preexec_func: ``callable``

    :param kill_func: Optional function which will be called on timeout to kill the process.
                      If not provided, it defaults to `process.kill`.
    :type kill_func: ``callable``

    :param read_stdout_func: Function which is responsible for reading process stdout when
                                 using live read mode.
    :type read_stdout_func: ``func``

    :param read_stderr_func: Function which is responsible for reading process stderr when
                             using live read mode.
    :type read_stderr_func: ``func``

    :param bufsize: Buffer size argument to pass to subprocess.popen function.
    :type bufsize: ``int``
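
    :param read_stdout_buffer: Buffer which read_stdout_func writes captured stdout into.
    :type read_stdout_buffer: ``object``

    :param read_stderr_buffer: Buffer which read_stderr_func writes captured stderr into.
    :type read_stderr_buffer: ``object``

    :param stdin_value: Optional value which is written to the process stdin once the process
                        has been spawned.
    :type stdin_value: ``str``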

    :rtype: ``tuple`` (exit_code, stdout, stderr, timed_out)
    """
    LOG.debug("Entering st2common.util.green.run_command.")

    if not isinstance(cmd, (list, tuple) + six.string_types):
        raise TypeError(
            f"Command must be a type of list, tuple, or string, not '{type(cmd)}'."
        )

    if (read_stdout_func and not read_stderr_func) or (
        read_stderr_func and not read_stdout_func
    ):
        raise ValueError(
            "Both read_stdout_func and read_stderr_func arguments need "
            "to be provided."
        )

    if read_stdout_func and not (read_stdout_buffer and read_stderr_buffer):
        raise ValueError(
            "read_stdout_buffer and read_stderr_buffer arguments need to be provided "
            "when read_stdout_func is provided"
        )

    if not env:
        LOG.debug("env argument not provided. using process env (os.environ).")
        env = os.environ.copy()

    subprocess = concurrency.get_subprocess_module()

    # Note: We are using eventlet / gevent friendly implementation of subprocess which uses
    # GreenPipe so it doesn't block
    LOG.debug("Creating subprocess.")
    process = concurrency.subprocess_popen(
        args=cmd,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        env=env,
        cwd=cwd,
        shell=shell,
        preexec_fn=preexec_func,
        bufsize=bufsize,
    )

    if read_stdout_func:
        LOG.debug("Spawning read_stdout_func function")
        read_stdout_thread = concurrency.spawn(
            read_stdout_func, process.stdout, read_stdout_buffer
        )

    if read_stderr_func:
        LOG.debug("Spawning read_stderr_func function")
        read_stderr_thread = concurrency.spawn(
            read_stderr_func, process.stderr, read_stderr_buffer
        )

    def on_timeout_expired(timeout):
        global timed_out

        try:
            LOG.debug("Starting process wait inside timeout handler.")
            process.wait(timeout=timeout)
        except subprocess.TimeoutExpired:
            # Command has timed out, kill the process and propagate the error.
            # Note: We explicitly set the returncode to indicate the timeout.
            LOG.debug("Command execution timeout reached.")

            # NOTE: It's important we set returncode twice - here and below - to avoid a race in
            # this function, because "kill_func()" is async and "process.kill()" is not.
            process.returncode = TIMEOUT_EXIT_CODE

            if kill_func:
                LOG.debug("Calling kill_func.")
                kill_func(process=process)
            else:
                LOG.debug("Killing process.")
                process.kill()

            # NOTE: It's important to set returncode here as well, since the call to process.kill()
            # sets it and would overwrite the value we set earlier.
            process.returncode = TIMEOUT_EXIT_CODE

            if read_stdout_func and read_stderr_func:
                LOG.debug("Killing read_stdout_thread and read_stderr_thread")
                concurrency.kill(read_stdout_thread)
                concurrency.kill(read_stderr_thread)

    LOG.debug("Spawning timeout handler thread.")
    timeout_thread = concurrency.spawn(on_timeout_expired, timeout)
    LOG.debug("Attaching to process.")

    if stdin_value:
        if six.PY3:
            stdin_value = stdin_value.encode("utf-8")

        process.stdin.write(stdin_value)

    if read_stdout_func and read_stderr_func:
        LOG.debug("Using real-time stdout and stderr read mode, calling process.wait()")
        process.wait()
    else:
        LOG.debug(
            "Using delayed stdout and stderr read mode, calling process.communicate()"
        )
        stdout, stderr = process.communicate()

    concurrency.cancel(timeout_thread)
    exit_code = process.returncode

    if read_stdout_func and read_stderr_func:
        # Wait on those green threads to finish reading from stdout and stderr before continuing
        concurrency.wait(read_stdout_thread)
        concurrency.wait(read_stderr_thread)

        stdout = read_stdout_buffer.getvalue()
        stderr = read_stderr_buffer.getvalue()

    if exit_code == TIMEOUT_EXIT_CODE:
        LOG.debug("Timeout.")
        timed_out = True
    else:
        LOG.debug("No timeout.")
        timed_out = False

    LOG.debug("Returning.")
    return (exit_code, stdout, stderr, timed_out)
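A hedged sketch of driving the live-read mode above: both read functions and both buffers must be passed together, the readers are spawned as green threads, and the buffers end up holding the output. The import path and the simple line-copying reader are assumptions; st2 itself builds the readers with make_read_and_store_stream_func, as the next example shows.

# Hypothetical driver for the live-read mode; the import path is an assumption.
from io import StringIO

from st2common.util.green.shell import run_command

def copy_stream_to_buffer(stream, buf):
    # Signature matches how run_command spawns the readers:
    # concurrency.spawn(read_stdout_func, process.stdout, read_stdout_buffer)
    for line in stream:
        buf.write(line.decode("utf-8") if isinstance(line, bytes) else line)

stdout_buffer = StringIO()
stderr_buffer = StringIO()

exit_code, stdout, stderr, timed_out = run_command(
    cmd="for i in 1 2 3; do echo $i; sleep 1; done",
    shell=True,
    timeout=10,
    read_stdout_func=copy_stream_to_buffer,
    read_stderr_func=copy_stream_to_buffer,
    read_stdout_buffer=stdout_buffer,
    read_stderr_buffer=stderr_buffer,
)

if timed_out:
    print("Command was killed after the timeout expired.")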
Example 3
    def _run(self, action):
        env_vars = self._env

        if not self.entry_point:
            script_action = False
        else:
            script_action = True

        args = action.get_full_command_string()
        sanitized_args = action.get_sanitized_full_command_string()

        # For consistency with the old Fabric based runner, make sure the file is executable
        if script_action:
            script_local_path_abs = self.entry_point
            args = 'chmod +x %s ; %s' % (script_local_path_abs, args)
            sanitized_args = 'chmod +x %s ; %s' % (script_local_path_abs,
                                                   sanitized_args)

        env = os.environ.copy()

        # Include user provided env vars (if any)
        env.update(env_vars)

        # Include common st2 env vars
        st2_env_vars = self._get_common_action_env_variables()
        env.update(st2_env_vars)

        LOG.info('Executing action via LocalRunner: %s', self.runner_id)
        LOG.info(
            '[Action info] name: %s, Id: %s, command: %s, user: %s, sudo: %s' %
            (action.name, action.action_exec_id, sanitized_args, action.user,
             action.sudo))

        stdout = StringIO()
        stderr = StringIO()

        store_execution_stdout_line = functools.partial(
            store_execution_output_data, output_type='stdout')
        store_execution_stderr_line = functools.partial(
            store_execution_output_data, output_type='stderr')

        read_and_store_stdout = make_read_and_store_stream_func(
            execution_db=self.execution,
            action_db=self.action,
            store_data_func=store_execution_stdout_line)
        read_and_store_stderr = make_read_and_store_stream_func(
            execution_db=self.execution,
            action_db=self.action,
            store_data_func=store_execution_stderr_line)

        subprocess = concurrency.get_subprocess_module()

        # If a sudo password is provided, pass it to the subprocess via stdin.
        # Note: We don't need to explicitly escape the argument because we pass command as a list
        # to subprocess.Popen and all the arguments are escaped by the function.
        if self._sudo_password:
            LOG.debug('Supplying sudo password via stdin')
            echo_process = concurrency.subprocess_popen(
                ['echo', self._sudo_password + '\n'], stdout=subprocess.PIPE)
            stdin = echo_process.stdout
        else:
            stdin = None

        # Make sure os.setsid is called on each spawned process so that all processes
        # are in the same group.

        # Process is started as sudo -u {{system_user}} -- bash -c {{command}}. The introduction of
        # bash means that multiple independent processes are spawned without being children of the
        # process we have access to, and this requires the use of pkill.
        # Ideally os.killpg should have done the trick but for some reason that failed.
        # Note: pkill will set the returncode to 143 so we don't need to explicitly set
        # it to some non-zero value.
        exit_code, stdout, stderr, timed_out = shell.run_command(
            cmd=args,
            stdin=stdin,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True,
            cwd=self._cwd,
            env=env,
            timeout=self._timeout,
            preexec_func=os.setsid,
            kill_func=kill_process,
            read_stdout_func=read_and_store_stdout,
            read_stderr_func=read_and_store_stderr,
            read_stdout_buffer=stdout,
            read_stderr_buffer=stderr)

        error = None

        if timed_out:
            error = 'Action failed to complete in %s seconds' % (self._timeout)
            exit_code = -1 * exit_code_constants.SIGKILL_EXIT_CODE

        # Detect if user provided an invalid sudo password or sudo is not configured for that user
        if self._sudo_password:
            if re.search(r'sudo: \d+ incorrect password attempts', stderr):
                match = re.search(r'\[sudo\] password for (.+?)\:', stderr)

                if match:
                    username = match.groups()[0]
                else:
                    username = '******'

                error = (
                    'Invalid sudo password provided or sudo is not configured for this user '
                    '(%s)' % (username))
                exit_code = -1

        succeeded = (exit_code == exit_code_constants.SUCCESS_EXIT_CODE)

        result = {
            'failed': not succeeded,
            'succeeded': succeeded,
            'return_code': exit_code,
            'stdout': strip_shell_chars(stdout),
            'stderr': strip_shell_chars(stderr)
        }

        if error:
            result['error'] = error

        status = PROC_EXIT_CODE_TO_LIVEACTION_STATUS_MAP.get(
            str(exit_code), action_constants.LIVEACTION_STATUS_FAILED)

        return (status,
                jsonify.json_loads(result,
                                   BaseLocalShellRunner.KEYS_TO_TRANSFORM),
                None)
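The sudo-password handling above amounts to piping the output of an echo process into the main command's stdin. A standalone sketch of that pattern with the plain subprocess module follows; the command, user, and password are placeholders, and sudo's -S flag is what makes it read the password from stdin.

# Standalone sketch of the echo-to-stdin pattern used above, with plain subprocess
# rather than the eventlet/gevent-friendly wrapper. Values are placeholders.
import subprocess

sudo_password = "example-password"  # placeholder

echo_process = subprocess.Popen(
    ["echo", sudo_password + "\n"], stdout=subprocess.PIPE
)

process = subprocess.Popen(
    ["sudo", "-S", "-u", "someuser", "--", "bash", "-c", "id"],
    stdin=echo_process.stdout,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)
stdout, stderr = process.communicate()
print(process.returncode, stdout, stderr)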
Example 4
def run_command(
    cmd,
    stdin=None,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    shell=False,
    cwd=None,
    env=None,
    close_fds=None,
):
    """
    Run the provided command in a subprocess and wait until it completes.

    :param cmd: Command to run.
    :type cmd: ``str`` or ``list``

    :param stdin: Process stdin.
    :type stdin: ``object``

    :param stdout: Process stdout.
    :type stdout: ``object``

    :param stderr: Process stderr.
    :type stderr: ``object``

    :param shell: True to use a shell.
    :type shell: ``boolean``

    :param cwd: Optional working directory.
    :type cwd: ``str``

    :param env: Optional environment to use with the command. If not provided,
                environment from the current process is inherited.
    :type env: ``dict``

    :param close_fds: True to close all the fds. By default, when None is provided, we rely on
                      the default upstream behavior, which may be Python version specific.
    :type close_fds: ``bool``

    :rtype: ``tuple`` (exit_code, stdout, stderr)
    """
    if not isinstance(cmd, (list, tuple) + six.string_types):
        raise TypeError(
            f"Command must be a type of list, tuple, or string, not '{type(cmd)}'."
        )

    if not env:
        env = os.environ.copy()

    kwargs = {}
    if close_fds is not None:
        kwargs["close_fds"] = close_fds

    process = concurrency.subprocess_popen(
        args=cmd,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        env=env,
        cwd=cwd,
        shell=shell,
        **kwargs,
    )
    stdout, stderr = process.communicate()
    exit_code = process.returncode

    if six.PY3:
        return (exit_code, stdout.decode(), stderr.decode())
    else:
        return (exit_code, stdout, stderr)
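A short usage sketch for the close_fds variant (the import path is again an assumption). Passing close_fds=True forwards it to the underlying Popen call; leaving it as None falls back to the Python version's default behavior.

# Hypothetical usage of the close_fds-aware variant; the import path is an assumption.
from st2common.util.shell import run_command

# Explicitly close inherited file descriptors in the child process.
exit_code, stdout, stderr = run_command(cmd=["env"], close_fds=True)

# Leave close_fds unset to rely on the upstream subprocess default.
exit_code, stdout, stderr = run_command(cmd="uname -a", shell=True)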