Example 1
    def __init__(self, name, reason, details=None):
        # NB: imported late, otherwise we'd have a circular dependency that's hard to fix
        from simpleflow.format import decode

        self.name = name
        self.reason = decode(reason, parse_json=False, use_proxy=False)
        self.details = decode(details, parse_json=False, use_proxy=False)

        super(TaskFailed, self).__init__(name, self.reason, self.details)
Example 2
    def test_jumbo_fields_in_task_failed_is_decoded(self):
        # prepare execution
        self.register_activity_type(
            "tests.test_simpleflow.swf.test_executor.print_me_n_times", "default"
        )

        # start execution
        self.start_workflow_execution(
            input='{"args": ["012345679", 10000], "kwargs": {"raises": true}}',
        )

        # decider part
        result = self.build_decisions(ExampleJumboWorkflow)
        assert len(result.decisions) == 1
        self.take_decisions(result.decisions, result.execution_context)

        # worker part
        self.process_activity_task()

        # now check the history
        events = self.get_workflow_execution_history()["events"]

        activity_result_evt = events[-2]
        assert activity_result_evt["eventType"] == "ActivityTaskFailed"
        attrs = activity_result_evt["activityTaskFailedEventAttributes"]
        expect(attrs["reason"]).to.match(
            r"simpleflow\+s3://jumbo-bucket/[a-z0-9-]+ 9\d{4}"
        )
        expect(attrs["details"]).to.match(
            r"simpleflow\+s3://jumbo-bucket/[a-z0-9-]+ 9\d{4}"
        )
        details = format.decode(attrs["details"])
        expect(details["error"]).to.equal("ValueError")
        expect(len(details["message"])).to.be.greater_than(9 * 10000)

        # decide again (should lead to workflow failure)
        result = self.build_decisions(ExampleJumboWorkflow)
        assert len(result.decisions) == 1
        assert result.decisions[0]["decisionType"] == "FailWorkflowExecution"
        self.take_decisions(result.decisions, result.execution_context)

        # now check history again
        events = self.get_workflow_execution_history()["events"]

        event = events[-1]
        assert event["eventType"] == "WorkflowExecutionFailed"
        attrs = event["workflowExecutionFailedEventAttributes"]

        details = format.decode(attrs["details"], use_proxy=False)
        expect(details).to.be.a("dict")
        expect(details["message"]).to.match(r"^Number: 012345.*")

        reason = format.decode(attrs["reason"], use_proxy=False)
        expect(reason).to.match(
            r"^Workflow execution error in activity-tests.test_simpleflow.swf."
            r'test_executor.print_me_n_times: "ValueError: Number: 012345679\d+"$'
        )
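
For orientation, the regexes above describe jumbo-field pointers: when a reason or details payload is apparently too large for SWF's field limits, the raw event attribute holds a simpleflow+s3://<bucket>/<key> reference followed by the payload size, which format.decode later resolves. The snippet below only illustrates the shapes implied by these assertions; the keys and sizes are hypothetical.

# Hypothetical raw attributes of the ActivityTaskFailed event; the keys and byte
# sizes are invented, only the shape follows the regex assertions above.
attrs = {
    "reason": "simpleflow+s3://jumbo-bucket/9f2d54c3-5a1e-4a7e-8d2b-0c1f2e3a4b5c 90123",
    "details": "simpleflow+s3://jumbo-bucket/1c0b8a7e-2d3f-4a5b-9c8d-7e6f5a4b3c2d 90456",
}

# After format.decode(attrs["details"]) the test expects a dict of roughly this shape
# (the "traceback" key comes from the worker code shown further down; it is not
# asserted in this test):
decoded_details = {
    "error": "ValueError",
    "message": "Number: 012345679012345679...",  # more than 9 * 10000 characters
    "traceback": ["..."],
}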
Example 3
    def test_jumbo_fields_in_task_failed_is_decoded(self):
        # prepare execution
        self.register_activity_type(
            "tests.test_simpleflow.swf.test_executor.print_me_n_times",
            "default"
        )

        # start execution
        self.start_workflow_execution(
            input='{"args": ["012345679", 10000], "kwargs": {"raises": true}}',
        )

        # decider part
        result = self.build_decisions(ExampleJumboWorkflow)
        assert len(result.decisions) == 1
        self.take_decisions(result.decisions, result.execution_context)

        # worker part
        self.process_activity_task()

        # now check the history
        events = self.get_workflow_execution_history()["events"]

        activity_result_evt = events[-2]
        assert activity_result_evt["eventType"] == "ActivityTaskFailed"
        attrs = activity_result_evt["activityTaskFailedEventAttributes"]
        expect(attrs["reason"]).to.match(r"simpleflow\+s3://jumbo-bucket/[a-z0-9-]+ 9\d{4}")
        expect(attrs["details"]).to.match(r"simpleflow\+s3://jumbo-bucket/[a-z0-9-]+ 9\d{4}")
        details = format.decode(attrs["details"])
        expect(details["error"]).to.equal("ValueError")
        expect(len(details["message"])).to.be.greater_than(9*10000)

        # decide again (should lead to workflow failure)
        result = self.build_decisions(ExampleJumboWorkflow)
        assert len(result.decisions) == 1
        assert result.decisions[0]["decisionType"] == "FailWorkflowExecution"
        self.take_decisions(result.decisions, result.execution_context)

        # now check history again
        events = self.get_workflow_execution_history()["events"]

        event = events[-1]
        assert event["eventType"] == "WorkflowExecutionFailed"
        attrs = event["workflowExecutionFailedEventAttributes"]

        details = format.decode(attrs["details"], use_proxy=False)
        expect(details).to.be.a("dict")
        expect(details["message"]).to.match(r"^Number: 012345.*")

        reason = format.decode(attrs["reason"], use_proxy=False)
        expect(reason).to.match(
            r'^Workflow execution error in activity-tests.test_simpleflow.swf.'
            r'test_executor.print_me_n_times: "ValueError: Number: 012345679\d+"$'
        )
Example 4
 def list_markers(self, all=False):
     if all:
         return [
             Marker(m['name'], format.decode(m['details']))
             for ml in self._history.markers.values() for m in ml
         ]
     rc = []
     for ml in self._history.markers.values():
         m = ml[-1]
         if m['state'] == 'recorded':
             rc.append(Marker(m['name'], format.decode(m['details'])))
     return rc
Example 5
 def _extract_reason(err):
     if hasattr(err.exception, 'reason'):
         raw = err.exception.reason
         # don't parse a possible json object here: since we will cast
         # the result to a string anyway, it's better to keep the json representation
         return format.decode(raw, parse_json=False, use_proxy=False)
     return repr(err.exception)
Example 6
 def get_event_details(self, event_type, event_name):
     if event_type == 'signal':
         return self._history.signals.get(event_name)
     elif event_type == 'marker':
         marker_list = self._history.markers.get(event_name)
         if not marker_list:
             return None
         marker_list = list(
             filter(
                 lambda m: m['state'] == 'recorded',
                 marker_list
             )
         )
         if not marker_list:
             return None
         # Make pleasing details
         marker = copy.copy(marker_list[-1])
         marker['details'] = format.decode(marker['details'])
         return marker
     elif event_type == 'timer':
         return self._history.timers.get(event_name)
     else:
         raise ValueError('Unimplemented type {!r} for get_event_details'.format(
             event_type
         ))
Example 7
    def _get_future_from_child_workflow_event(self, event):
        """Maps a child workflow event to a Future with the corresponding
        state.

        :param event: child workflow event
        :type  event: dict[str, Any]
        """
        future = futures.Future()
        state = event['state']

        if state == 'start_initiated':
            pass  # future._state = futures.PENDING
        elif state == 'start_failed':
            if event['cause'] == 'WORKFLOW_TYPE_DOES_NOT_EXIST':
                workflow_type = swf.models.WorkflowType(
                    self.domain,
                    name=event['name'],
                    version=event['version'],
                )
                logger.info('Creating workflow type {} in domain {}'.format(
                    workflow_type.name,
                    self.domain.name,
                ))
                try:
                    workflow_type.save()
                except swf.exceptions.AlreadyExistsError:
                    # Could have been created by a concurrent workflow execution.
                    pass
                return None
            future.set_exception(exceptions.TaskFailed(
                name=event['id'],
                reason=event['cause'],
                details=event.get('details'),
            ))
        elif state == 'started':
            future.set_running()
        elif state == 'completed':
            future.set_finished(format.decode(event['result']))
        elif state == 'failed':
            future.set_exception(exceptions.TaskFailed(
                name=event['id'],
                reason=event['reason'],
                details=event.get('details'),
            ))
        elif state == 'timed_out':
            future.set_exception(exceptions.TimeoutError(
                event['timeout_type'],
                None,
            ))
        elif state == 'canceled':
            future.set_exception(exceptions.TaskCanceled(
                event.get('details'),
            ))
        elif state == 'terminated':
            future.set_exception(exceptions.TaskTerminated())

        return future
Example 8
    def process(self, poller, token, task):
        """

        :param poller:
        :type poller: ActivityPoller
        :param token:
        :type token: str
        :param task:
        :type task: swf.models.ActivityTask
        """
        logger.debug('ActivityWorker.process() pid={}'.format(os.getpid()))
        try:
            activity = self.dispatch(task)
            input = format.decode(task.input)
            args = input.get('args', ())
            kwargs = input.get('kwargs', {})
            context = sanitize_activity_context(task.context)
            context['domain_name'] = poller.domain.name
            if input.get('meta', {}).get('binaries'):
                download_binaries(input['meta']['binaries'])
            result = ActivityTask(activity, *args, context=context, **kwargs).execute()
        except Exception:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            logger.exception("process error: {}".format(str(exc_value)))
            if isinstance(exc_value, ExecutionError) and len(exc_value.args):
                details = exc_value.args[0]
                reason = format_exc(exc_value)  # FIXME json.loads and rebuild?
            else:
                tb = traceback.format_tb(exc_traceback)
                reason = format_exc(exc_value)
                details = json_dumps(
                    {
                        'error': exc_type.__name__,
                        'message': str(exc_value),
                        'traceback': tb,
                    },
                    default=repr
                )
            return poller.fail_with_retry(
                token,
                task,
                reason=reason,
                details=details
            )

        try:
            logger.info('completing activity')
            poller.complete_with_retry(token, result)
        except Exception as err:
            logger.exception("complete error")
            reason = 'cannot complete task {}: {} {}'.format(
                task.activity_id,
                err.__class__.__name__,
                err,
            )
            poller.fail_with_retry(token, task, reason)
Example 9
    def _get_future_from_activity_event(self, event):
        """Maps an activity event to a Future with the corresponding state.

        :param event: activity event
        :type  event: dict[str, Any]
        :rtype: futures.Future

        """
        future = futures.Future()  # state is PENDING.
        state = event['state']

        if state == 'scheduled':
            pass
        elif state == 'schedule_failed':
            if event['cause'] == 'ACTIVITY_TYPE_DOES_NOT_EXIST':
                activity_type = swf.models.ActivityType(
                    self.domain,
                    name=event['activity_type']['name'],
                    version=event['activity_type']['version'])
                logger.info('creating activity type {} in domain {}'.format(
                    activity_type.name,
                    self.domain.name))
                try:
                    activity_type.save()
                except swf.exceptions.AlreadyExistsError:
                    logger.info(
                        'oops: Activity type {} in domain {} already exists, creation failed, continuing...'.format(
                            activity_type.name,
                            self.domain.name))
                return None
            logger.info('failed to schedule {}: {}'.format(
                event['activity_type']['name'],
                event['cause'],
            ))
            return None
        elif state == 'started':
            future.set_running()
        elif state == 'completed':
            result = event['result']
            future.set_finished(format.decode(result))
        elif state == 'canceled':
            future.set_cancelled()
        elif state == 'failed':
            exception = exceptions.TaskFailed(
                name=event['id'],
                reason=event['reason'],
                details=event.get('details'))
            future.set_exception(exception)
        elif state == 'timed_out':
            exception = exceptions.TimeoutError(
                event['timeout_type'],
                event['timeout_value'])
            future.set_exception(exception)

        return future
Example 10
    def test_decode(self):
        self.setup_jumbo_fields("jumbo-bucket")
        push_content("jumbo-bucket", "abc", "decoded jumbo field yay!")

        cases = [
            [None,                                  None],
            ["foo bar baz",                         "foo bar baz"],
            ['"a string"',                          "a string"],
            ['[1, 2]',                              [1, 2]],
            ["simpleflow+s3://jumbo-bucket/abc 24", "decoded jumbo field yay!"],
        ]

        for case in cases:
            self.assertEqual(case[1], format.decode(case[0]))
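
The cases table above pins down format.decode's contract: None passes through, plain strings come back unchanged, JSON strings are parsed into Python objects, and a simpleflow+s3:// pointer is resolved by fetching the stored content. What follows is a minimal sketch of that behaviour, not simpleflow's actual implementation: JUMBO_FIELDS_PREFIX, push_content, pull_content and decode_sketch are illustrative names, and an in-memory dict stands in for S3.

import json

JUMBO_FIELDS_PREFIX = "simpleflow+s3://"  # prefix taken from the test cases above

# In-memory stand-in for the jumbo-fields bucket; the real library reads from S3.
_FAKE_S3 = {}

def push_content(bucket, key, content):
    _FAKE_S3[(bucket, key)] = content

def pull_content(bucket, key):
    return _FAKE_S3[(bucket, key)]

def decode_sketch(value, parse_json=True):
    """Approximate the behaviour exercised by test_decode (sketch only)."""
    if value is None:
        return None                       # [None, None] case
    if value.startswith(JUMBO_FIELDS_PREFIX):
        # pointer format: "simpleflow+s3://<bucket>/<key> <size>"
        location, _size = value.rsplit(" ", 1)
        bucket, key = location[len(JUMBO_FIELDS_PREFIX):].split("/", 1)
        value = pull_content(bucket, key)
    if not parse_json:
        return value                      # TaskFailed above keeps the raw string
    try:
        return json.loads(value)          # '"a string"' -> "a string", '[1, 2]' -> [1, 2]
    except ValueError:
        return value                      # plain text like "foo bar baz" passes through

# mirrors the jumbo-field case from the test:
push_content("jumbo-bucket", "abc", "decoded jumbo field yay!")
assert decode_sketch("simpleflow+s3://jumbo-bucket/abc 24") == "decoded jumbo field yay!"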
Example 11
def activity_rerun(domain, workflow_id, run_id, input, scheduled_id,
                   activity_id):
    # handle params
    if not activity_id and not scheduled_id:
        logger.error("Please supply --scheduled-id or --activity-id.")
        sys.exit(1)

    input_override = None
    if input:
        input_override = format.decode(input)

    # find workflow execution
    try:
        wfe = helpers.get_workflow_execution(domain, workflow_id, run_id)
    except (swf.exceptions.DoesNotExistError, IndexError):
        logger.error("Couldn't find execution, exiting.")
        sys.exit(1)
    logger.info("Found execution: workflowId={} runId={}".format(
        wfe.workflow_id, wfe.run_id))

    # now rerun the specified activity
    history = History(wfe.history())
    history.parse()
    task, args, kwargs, meta, params = helpers.find_activity(
        history,
        scheduled_id=scheduled_id,
        activity_id=activity_id,
        input=input_override,
    )
    kwargs["context"].update({
        "workflow_id": wfe.workflow_id,
        "run_id": wfe.run_id,
    })
    logger.debug("Found activity. Last execution:")
    for line in json_dumps(params, pretty=True).split("\n"):
        logger.debug(line)
    if input_override:
        logger.info("NB: input will be overriden with the passed one!")
    logger.info("Will re-run: {}(*{}, **{}) [+meta={}]".format(
        task, args, kwargs, meta))

    # download binaries if needed
    download_binaries(meta.get("binaries", {}))

    # execute the activity task with the correct arguments
    instance = ActivityTask(task, *args, **kwargs)
    result = instance.execute()
    if hasattr(instance, "post_execute"):
        instance.post_execute()
    logger.info("Result (JSON): {}".format(json_dumps(result, compact=False)))
Example 12
def activity_rerun(domain,
                   workflow_id,
                   run_id,
                   input,
                   scheduled_id,
                   activity_id):
    # handle params
    if not activity_id and not scheduled_id:
        logger.error("Please supply --scheduled-id or --activity-id.")
        sys.exit(1)

    input_override = None
    if input:
        input_override = format.decode(input)

    # find workflow execution
    try:
        wfe = helpers.get_workflow_execution(domain, workflow_id, run_id)
    except (swf.exceptions.DoesNotExistError, IndexError):
        logger.error("Couldn't find execution, exiting.")
        sys.exit(1)
    logger.info("Found execution: workflowId={} runId={}".format(wfe.workflow_id, wfe.run_id))

    # now rerun the specified activity
    history = History(wfe.history())
    history.parse()
    task, args, kwargs, meta, params = helpers.find_activity(
        history, scheduled_id=scheduled_id, activity_id=activity_id, input=input_override,
    )
    logger.debug("Found activity. Last execution:")
    for line in json_dumps(params, pretty=True).split("\n"):
        logger.debug(line)
    if input_override:
        logger.info("NB: input will be overriden with the passed one!")
    logger.info("Will re-run: {}(*{}, **{}) [+meta={}]".format(task, args, kwargs, meta))

    # download binaries if needed
    download_binaries(meta.get("binaries", {}))

    # execute the activity task with the correct arguments
    instance = ActivityTask(task, *args, **kwargs)
    result = instance.execute()
    if hasattr(instance, 'post_execute'):
        instance.post_execute()
    logger.info("Result (JSON): {}".format(json_dumps(result, compact=False)))
Example 13
        def execute(*args, **kwargs):
            logger = logging.getLogger(logger_name)
            command = 'simpleflow.execute'  # name of a module.
            sys.stdout.flush()
            sys.stderr.flush()
            result_str = None  # useless
            context = kwargs.pop('context', {})
            with tempfile.TemporaryFile() as result_fd, tempfile.TemporaryFile() as error_fd:
                dup_result_fd = os.dup(result_fd.fileno())  # remove FD_CLOEXEC
                dup_error_fd = os.dup(error_fd.fileno())  # remove FD_CLOEXEC
                # print('error_fd: {}'.format(dup_error_fd))
                full_command = [
                    interpreter,
                    '-m',
                    command,  # execute the module as a script.
                    get_name(func),
                    format_arguments_json(*args, **kwargs),
                    '--logger-name={}'.format(logger_name),
                    '--result-fd={}'.format(dup_result_fd),
                    '--error-fd={}'.format(dup_error_fd),
                    '--context={}'.format(json_dumps(context)),
                ]
                if kill_children:
                    full_command.append('--kill-children')
                if compat.PY2:  # close_fds doesn't work with python2 (using its C _posixsubprocess helper)
                    close_fds = False
                    pass_fds = ()
                else:
                    close_fds = True
                    pass_fds = (dup_result_fd, dup_error_fd)
                process = subprocess.Popen(
                    full_command,
                    bufsize=-1,
                    close_fds=close_fds,
                    pass_fds=pass_fds,
                )
                rc = wait_subprocess(process,
                                     timeout=timeout,
                                     command_info=full_command)
                os.close(dup_result_fd)
                os.close(dup_error_fd)
                if rc:
                    error_fd.seek(0)
                    err_output = error_fd.read()
                    if err_output:
                        if not compat.PY2:
                            err_output = err_output.decode('utf-8',
                                                           errors='replace')
                    raise ExecutionError(err_output)

                result_fd.seek(0)
                result_str = result_fd.read()

            if not result_str:
                return None
            try:
                if not compat.PY2:
                    result_str = result_str.decode('utf-8', errors='replace')
                result = format.decode(result_str)
                return result
            except BaseException as ex:
                logger.exception('Exception in python.execute: {} {}'.format(
                    ex.__class__.__name__, ex))
                logger.warning('%r', result_str)
Example 14
 def input(self, value):
     self._input = format.decode(value)
Example 15
 def control(self, value):
     self._control = format.decode(value)
Example 16
def get_input(wf_input):
    if not wf_input:
        wf_input = sys.stdin.read()
    wf_input = format.decode(wf_input)
    return transform_input(wf_input)
Example 17
def main():
    """
    When executed as a script, this module expects the name of a callable as
    its first argument and the arguments of the callable encoded in a JSON
    string as its second argument. It then executes the callable with the
    arguments after decoding them into Python objects. It finally encodes the
    value returned by the callable into a JSON string and prints it on stdout.

    The arguments of the callable are stored in a dict with the following
    format::

        {'args': [...],
         'kwargs': {
            ...,
         }
         }

    Synopsis
    --------

    ::
        usage: execute.py [-h] funcname funcargs

        positional arguments:
          funcname    name of the callable to execute
          funcargs    callable arguments in JSON

        optional arguments:
          -h, --help  show this help message and exit

    Examples
    --------

    ::
        $ python -m simpleflow.execute "os.path.exists" '{"args": ["/tmp"]}'
        true

    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'funcname',
        help='name of the callable to execute',
    )
    parser.add_argument(
        'funcargs',
        help='callable arguments in JSON',
    )
    parser.add_argument(
        '--context',
        help='Activity Context',
    )
    parser.add_argument(
        '--logger-name',
        help='logger name',
    )
    parser.add_argument(
        '--result-fd',
        type=int,
        default=1,
        metavar='N',
        help='result file descriptor',
    )
    parser.add_argument(
        '--error-fd',
        type=int,
        default=2,
        metavar='N',
        help='error file descriptor',
    )
    parser.add_argument(
        '--arguments-json-fd',
        type=int,
        default=None,
        metavar='N',
        help='JSON input file descriptor',
    )
    parser.add_argument(
        '--kill-children',
        action='store_true',
        help='kill child processes on exit',
    )
    cmd_arguments = parser.parse_args()

    def kill_child_processes():
        process = psutil.Process(os.getpid())
        children = process.children(recursive=True)

        for child in children:
            try:
                child.terminate()
            except psutil.NoSuchProcess:
                pass
        _, still_alive = psutil.wait_procs(children, timeout=0.3)
        for child in still_alive:
            try:
                child.kill()
            except psutil.NoSuchProcess:
                pass

    funcname = cmd_arguments.funcname
    if cmd_arguments.arguments_json_fd is None:
        content = cmd_arguments.funcargs
        if content is None:
            parser.error('the following arguments are required: funcargs')
    else:
        with os.fdopen(cmd_arguments.arguments_json_fd) as arguments_json_file:
            content = arguments_json_file.read()
    try:
        arguments = format.decode(content)
    except Exception:
        raise ValueError('cannot load arguments from {}'.format(content))
    if cmd_arguments.logger_name:
        logger = logging.getLogger(cmd_arguments.logger_name)
    else:
        logger = simpleflow_logger
    callable_ = make_callable(funcname)
    if hasattr(callable_, '__wrapped__'):
        callable_ = callable_.__wrapped__
    args = arguments.get('args', ())
    kwargs = arguments.get('kwargs', {})
    context = json.loads(cmd_arguments.context) if cmd_arguments.context is not None else None
    try:
        if hasattr(callable_, 'execute'):
            inst = callable_(*args, **kwargs)
            if context is not None:
                inst.context = context
            result = inst.execute()
            if hasattr(inst, 'post_execute'):
                inst.post_execute()
        else:
            if context is not None:
                callable_.context = context
            result = callable_(*args, **kwargs)
    except Exception as err:
        logger.error('Exception: {}'.format(err))
        exc_type, exc_value, exc_traceback = sys.exc_info()
        tb = traceback.format_tb(exc_traceback)
        details = json_dumps(
            {
                'error': exc_type.__name__,
                'message': str(exc_value),
                'traceback': tb,
            },
            default=repr,
        )
        if cmd_arguments.error_fd == 2:
            sys.stderr.flush()
        if not compat.PY2:
            details = details.encode('utf-8')
        os.write(cmd_arguments.error_fd, details)
        if cmd_arguments.kill_children:
            kill_child_processes()
        sys.exit(1)

    if cmd_arguments.result_fd == 1:  # stdout (legacy)
        sys.stdout.flush()  # may have prints in flight
        os.write(cmd_arguments.result_fd, b'\n')
    result = json_dumps(result)
    if not compat.PY2:
        result = result.encode('utf-8')
    os.write(cmd_arguments.result_fd, result)
    if cmd_arguments.kill_children:
        kill_child_processes()
Example 18
        def execute(*args, **kwargs):
            logger = logging.getLogger(logger_name)
            command = 'simpleflow.execute'  # name of a module.
            sys.stdout.flush()
            sys.stderr.flush()
            result_str = None  # useless
            context = kwargs.pop('context', {})
            with tempfile.TemporaryFile() as result_fd, tempfile.TemporaryFile() as error_fd:
                dup_result_fd = os.dup(result_fd.fileno())  # remove FD_CLOEXEC
                dup_error_fd = os.dup(error_fd.fileno())  # remove FD_CLOEXEC
                arguments_json = format_arguments_json(*args, **kwargs)
                full_command = [
                    interpreter,
                    '-m',
                    command,  # execute the module as a script.
                    get_name(func),
                    '--logger-name={}'.format(logger_name),
                    '--result-fd={}'.format(dup_result_fd),
                    '--error-fd={}'.format(dup_error_fd),
                    '--context={}'.format(json_dumps(context)),
                ]
                if len(arguments_json) < MAX_ARGUMENTS_JSON_LENGTH:  # command-line limit on Linux: 128K
                    full_command.append(arguments_json)
                    arg_file = None
                    arg_fd = None
                else:
                    arg_file = tempfile.TemporaryFile()
                    arg_file.write(arguments_json.encode('utf-8'))
                    arg_file.flush()
                    arg_file.seek(0)
                    arg_fd = os.dup(arg_file.fileno())
                    full_command.append('--arguments-json-fd={}'.format(arg_fd))
                    full_command.append('foo')  # dummy funcarg
                if kill_children:
                    full_command.append('--kill-children')
                if is_buggy_subprocess32():  # close_fds doesn't work with subprocess32 < 3.5.0
                    close_fds = False
                    pass_fds = []
                else:
                    close_fds = True
                    pass_fds = [dup_result_fd, dup_error_fd]
                    if arg_file:
                        pass_fds.append(arg_fd)
                process = subprocess.Popen(
                    full_command,
                    bufsize=-1,
                    close_fds=close_fds,
                    pass_fds=pass_fds,
                )
                rc = wait_subprocess(process,
                                     timeout=timeout,
                                     command_info=full_command)
                os.close(dup_result_fd)
                os.close(dup_error_fd)
                if arg_file:
                    arg_file.close()
                if rc:
                    error_fd.seek(0)
                    err_output = error_fd.read()
                    if err_output:
                        if not compat.PY2:
                            err_output = err_output.decode('utf-8',
                                                           errors='replace')
                    raise ExecutionError(err_output)

                result_fd.seek(0)
                result_str = result_fd.read()

            if not result_str:
                return None
            try:
                if not compat.PY2:
                    result_str = result_str.decode('utf-8', errors='replace')
                result = format.decode(result_str)
                return result
            except BaseException as ex:
                logger.exception('Exception in python.execute: {} {}'.format(
                    ex.__class__.__name__, ex))
                logger.warning('%r', result_str)
Example 19
def main():
    """
    When executed as a script, this module expects the name of a callable as
    its first argument and the arguments of the callable encoded in a JSON
    string as its second argument. It then executes the callable with the
    arguments after decoding them into Python objects. It finally encodes the
    value returned by the callable into a JSON string and prints it on stdout.

    The arguments of the callable are stored in a dict with the following
    format::

        {'args': [...],
         'kwargs': {
            ...,
         }
         }

    Synopsis
    --------

    ::
        usage: execute.py [-h] funcname funcargs

        positional arguments:
          funcname    name of the callable to execute
          funcargs    callable arguments in JSON

        optional arguments:
          -h, --help  show this help message and exit

    Examples
    --------

    ::
        $ python -m simpleflow.execute "os.path.exists" '{"args": ["/tmp"]}'
        true

    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'funcname',
        help='name of the callable to execute',
    )
    parser.add_argument(
        'funcargs',
        help='callable arguments in JSON',
    )
    parser.add_argument(
        '--context',
        help='Activity Context',
    )
    parser.add_argument(
        '--logger-name',
        help='logger name',
    )
    parser.add_argument(
        '--result-fd',
        type=int,
        default=1,
        metavar='N',
        help='result file descriptor',
    )
    parser.add_argument(
        '--error-fd',
        type=int,
        default=2,
        metavar='N',
        help='error file descriptor',
    )
    parser.add_argument(
        '--arguments-json-fd',
        type=int,
        default=None,
        metavar='N',
        help='JSON input file descriptor',
    )
    parser.add_argument(
        '--kill-children',
        action='store_true',
        help='kill child processes on exit',
    )
    cmd_arguments = parser.parse_args()

    def kill_child_processes():
        process = psutil.Process(os.getpid())
        children = process.children(recursive=True)

        for child in children:
            try:
                child.terminate()
            except psutil.NoSuchProcess:
                pass
        _, still_alive = psutil.wait_procs(children, timeout=0.3)
        for child in still_alive:
            try:
                child.kill()
            except psutil.NoSuchProcess:
                pass

    funcname = cmd_arguments.funcname
    if cmd_arguments.arguments_json_fd is None:
        content = cmd_arguments.funcargs
        if content is None:
            parser.error('the following arguments are required: funcargs')
    else:
        with os.fdopen(cmd_arguments.arguments_json_fd) as arguments_json_file:
            content = arguments_json_file.read()
    try:
        arguments = format.decode(content)
    except Exception:
        raise ValueError('cannot load arguments from {}'.format(content))
    if cmd_arguments.logger_name:
        logger = logging.getLogger(cmd_arguments.logger_name)
    else:
        logger = simpleflow_logger
    callable_ = make_callable(funcname)
    if hasattr(callable_, '__wrapped__'):
        callable_ = callable_.__wrapped__
    args = arguments.get('args', ())
    kwargs = arguments.get('kwargs', {})
    context = json.loads(cmd_arguments.context) if cmd_arguments.context is not None else None
    try:
        if hasattr(callable_, 'execute'):
            inst = callable_(*args, **kwargs)
            if context is not None:
                inst.context = context
            result = inst.execute()
            if hasattr(inst, 'post_execute'):
                inst.post_execute()
        else:
            if context is not None:
                callable_.context = context
            result = callable_(*args, **kwargs)
    except Exception as err:
        logger.error('Exception: {}'.format(err))
        exc_type, exc_value, exc_traceback = sys.exc_info()
        tb = traceback.format_tb(exc_traceback)
        details = json_dumps(
            {
                'error': exc_type.__name__,
                'message': str(exc_value),
                'traceback': tb,
            },
            default=repr,
        )
        if cmd_arguments.error_fd == 2:
            sys.stderr.flush()
        if not compat.PY2:
            details = details.encode('utf-8')
        os.write(cmd_arguments.error_fd, details)
        if cmd_arguments.kill_children:
            kill_child_processes()
        sys.exit(1)

    if cmd_arguments.result_fd == 1:  # stdout (legacy)
        sys.stdout.flush()  # may have prints in flight
        os.write(cmd_arguments.result_fd, b'\n')
    result = json_dumps(result)
    if not compat.PY2:
        result = result.encode('utf-8')
    os.write(cmd_arguments.result_fd, result)
    if cmd_arguments.kill_children:
        kill_child_processes()
Example 20
def load_input(input_fp):
    if input_fp is None:
        input_fp = sys.stdin
    input = format.decode(input_fp)
    return transform_input(input)
Example 21
        def execute(*args, **kwargs):
            logger = logging.getLogger(logger_name)
            command = 'simpleflow.execute'  # name of a module.
            sys.stdout.flush()
            sys.stderr.flush()
            result_str = None  # useless
            context = kwargs.pop('context', {})
            with tempfile.TemporaryFile() as result_fd, tempfile.TemporaryFile() as error_fd:
                dup_result_fd = os.dup(result_fd.fileno())  # remove FD_CLOEXEC
                dup_error_fd = os.dup(error_fd.fileno())  # remove FD_CLOEXEC
                arguments_json = format_arguments_json(*args, **kwargs)
                full_command = [
                    interpreter, '-m', command,  # execute the module as a script.
                    get_name(func),
                    '--logger-name={}'.format(logger_name),
                    '--result-fd={}'.format(dup_result_fd),
                    '--error-fd={}'.format(dup_error_fd),
                    '--context={}'.format(json_dumps(context)),
                ]
                if len(arguments_json) < MAX_ARGUMENTS_JSON_LENGTH:  # command-line limit on Linux: 128K
                    full_command.append(arguments_json)
                    arg_file = None
                    arg_fd = None
                else:
                    arg_file = tempfile.TemporaryFile()
                    arg_file.write(arguments_json.encode('utf-8'))
                    arg_file.flush()
                    arg_file.seek(0)
                    arg_fd = os.dup(arg_file.fileno())
                    full_command.append('--arguments-json-fd={}'.format(arg_fd))
                    full_command.append('foo')  # dummy funcarg
                if kill_children:
                    full_command.append('--kill-children')
                if is_buggy_subprocess32():  # close_fds doesn't work with subprocess32 < 3.5.0
                    close_fds = False
                    pass_fds = []
                else:
                    close_fds = True
                    pass_fds = [dup_result_fd, dup_error_fd]
                    if arg_file:
                        pass_fds.append(arg_fd)
                process = subprocess.Popen(
                    full_command,
                    bufsize=-1,
                    close_fds=close_fds,
                    pass_fds=pass_fds,
                )
                rc = wait_subprocess(process, timeout=timeout, command_info=full_command)
                os.close(dup_result_fd)
                os.close(dup_error_fd)
                if arg_file:
                    arg_file.close()
                if rc:
                    error_fd.seek(0)
                    err_output = error_fd.read()
                    if err_output:
                        if not compat.PY2:
                            err_output = err_output.decode('utf-8', errors='replace')
                    raise ExecutionError(err_output)

                result_fd.seek(0)
                result_str = result_fd.read()

            if not result_str:
                return None
            try:
                if not compat.PY2:
                    result_str = result_str.decode('utf-8', errors='replace')
                result = format.decode(result_str)
                return result
            except BaseException as ex:
                logger.exception('Exception in python.execute: {} {}'.format(ex.__class__.__name__, ex))
                logger.warning('%r', result_str)