def _process_payload_step(payload_str, obj):
    """
    Internal function to turn a json fsm payload (from an AWS Lambda event),
    into an fsm Context, and then dispatch the event and execute user code.

    This function is ONLY used in the AWS Step Function execution path.

    :param payload_str: a json string like '{"serialized": "data"}'
    :param obj: a dict to pass to fsm Context.dispatch(...)
    """
    deserialized = json.loads(payload_str)
    obj[OBJ.PAYLOAD] = payload_str
    machine = Context.from_payload_dict(deserialized)
    logger.info('system_context=%s', machine.system_context())
    logger.info('user_context.keys()=%s', machine.user_context().keys())
    event = machine.system_context().get(SYSTEM_CONTEXT.CURRENT_EVENT,
                                         STATE.PSEUDO_INIT)

    # retries etc. are delegated entirely to the AWS Step Function
    # infrastructure, so this is a stripped-down dispatch that runs ONLY
    # the user Actions and NONE of the framework's retry machinery.
    following_event = machine.current_state.dispatch(machine, event, obj)
    if not following_event:
        return None
    machine.current_event = following_event
    serialized = machine.to_payload_dict()
    serialized[AWS.STEP_FUNCTION] = True
    return serialized
# --- Ejemplo n.º 2 (scraped-listing separator; kept as a comment so the file parses) ---
def _process_payload(payload_str, obj):
    """
    Internal function to turn a json fsm payload (from an AWS Lambda event),
    into an fsm Context, and then dispatch the event and execute user code.

    :param payload_str: a json string like '{"serialized": "data"}'
    :param obj: a dict to pass to fsm Context.dispatch(...)
    :return: None
    """
    payload = json.loads(payload_str)
    logger.info('payload=%s', payload)
    obj[OBJ.PAYLOAD] = payload_str
    fsm = Context.from_payload_dict(payload)
    # fall back to the pseudo-init event when no current event has been
    # recorded yet (i.e. the very first dispatch for this machine).
    current_event = fsm.system_context().get(SYSTEM_CONTEXT.CURRENT_EVENT,
                                             STATE.PSEUDO_INIT)
    fsm.dispatch(current_event, obj)
def _process_payload(payload_str, obj):
    """
    Internal function to turn a json fsm payload (from an AWS Lambda event),
    into an fsm Context, and then dispatch the event and execute user code.

    :param payload_str: a json string like '{"serialized": "data"}'
    :param obj: a dict to pass to fsm Context.dispatch(...)
    """
    deserialized = json.loads(payload_str)
    obj[OBJ.PAYLOAD] = payload_str
    machine = Context.from_payload_dict(deserialized)
    logger.info('system_context=%s', machine.system_context())
    logger.info('user_context.keys()=%s', machine.user_context().keys())
    event = machine.system_context().get(SYSTEM_CONTEXT.CURRENT_EVENT, STATE.PSEUDO_INIT)
    machine.dispatch(event, obj)
# --- Ejemplo n.º 4 (scraped-listing separator; kept as a comment so the file parses) ---
    def execute(self, context, obj):
        """
        Action that launches and ECS task.

        The API for using this class is as follows:

        {
           'context_var': 'context_value',              # normal context variable
           'task_details': {                            # dictionary of all the states that run images
              'state_name_1': {                         # first state name (as in fsm.yaml)
                                                        # cluster to run image for state_name_1
                'cluster_arn': 'arn:aws:ecs:region:1234567890:cluster/foobar',
                'container_image': 'host/corp/image:12345' # image for state_name_1
              },
              'state_name_2': {                         # second state name (as in fsm.yaml)
                'cluster_arn': 'arn:aws:ecs:eu-west-1:1234567890:cluster/foobar',
                'container_image': 'host/corp/image:12345',
                'runner_task_definition': 'my_runner',  # alternative docker image runner task name
                'runner_container_name': 'my_runner'    # alternative docker image runner container name
              }
            },
            'clone_aws_credentials': True               # flag to copy aws creds from local environment
                                                        # to the container overrides - makes for easier
                                                        # local testing. alternatively, just add permanent
                                                        # credentials to your runner task.
        }

        :param context: a aws_lambda_fsm.fsm.Context instance
        :param obj: a dict
        :return: a string event, or None
        """

        # construct a version of the context that can be base64 encoded
        # and stuffed into a environment variable for the container program.
        # all the container program needs to do is extract this data, add
        # an event, and send the message onto sqs/kinesis/... since this is an
        # ENTRY action, we inspect the current transition for the state we
        # will be in AFTER this code executes.
        # the round-trip through to_payload_dict/from_payload_dict produces an
        # independent copy so mutating current_state/steps below cannot affect
        # the caller's live context.
        ctx = Context.from_payload_dict(context.to_payload_dict())
        ctx.current_state = context.current_transition.target
        ctx.steps += 1
        # NOTE(review): base64.b64encode is applied directly to json.dumps
        # output, which implies Python 2 str semantics (on Python 3 this would
        # need an .encode() first) — confirm target runtime.
        fsm_context = base64.b64encode(
            json.dumps(ctx.to_payload_dict(),
                       **json_dumps_additional_kwargs()))

        # now finally launch the ECS task using all the data from above
        # as well as tasks etc. specified when the state machine was run.
        # a missing state name here raises KeyError — presumably a
        # configuration error the framework surfaces upstream; verify.
        state_to_task_details_map = context[TASK_DETAILS_KEY]
        task_details = state_to_task_details_map[
            context.current_transition.target.name]

        # this is the image the user wants to run
        # (both keys are required; .get() is deliberately not used)
        cluster_arn = task_details[CLUSTER_ARN_KEY]
        container_image = task_details[CONTAINER_IMAGE_KEY]

        # this is the task that will run that image
        # (optional overrides fall back to the framework defaults)
        task_definition = task_details.get(RUNNER_TASK_DEFINITION_KEY,
                                           DEFAULT_RUNNER_TASK_NAME)
        container_name = task_details.get(RUNNER_CONTAINER_NAME_KEY,
                                          DEFAULT_RUNNER_CONTAINER_NAME)

        # setup the environment for the ECS task. this first set of variables
        # are used by the docker container runner image.
        environment = {
            ENVIRONMENT.FSM_CONTEXT: fsm_context,
            ENVIRONMENT.FSM_DOCKER_IMAGE: container_image
        }
        # this second set of variables are used by actual docker image that
        # does actual stuff (pdf processing etc.)
        # user-supplied entries can overwrite the two framework variables
        # above if the names collide — TODO confirm that is intended.
        for name, value in task_details.get(ENVIRONMENT_KEY, {}).items():
            environment[name] = value

        # store the environment and record the guid.
        # (second element of the returned tuple is unused here)
        guid, _ = store_environment(context, environment)

        # stuff the guid and a couple stream settings into the task
        # overrides. the guid allows the FSM_CONTEXT to be loaded from
        # storage, and the FSM_PRIMARY_STREAM_SOURCE allow the call
        # to send_next_event_for_dispatch call to succeed.
        # stream sources are coerced to '' because ECS environment values
        # must be strings, not None.
        env = [{
            AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.NAME:
            ENVIRONMENT.FSM_ENVIRONMENT_GUID_KEY,
            AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.VALUE: guid
        }, {
            AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.NAME:
            ENVIRONMENT.FSM_PRIMARY_STREAM_SOURCE,
            AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.VALUE:
            get_primary_stream_source() or ''
        }, {
            AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.NAME:
            ENVIRONMENT.FSM_SECONDARY_STREAM_SOURCE,
            AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.VALUE:
            get_secondary_stream_source() or ''
        }]

        # this is for local testing
        # (_testing mutates env in place; presumably it appends the local AWS
        # credentials to the overrides — confirm against its definition)
        if context.get(CLONE_AWS_CREDENTIALS_KEY):
            _testing(env)

        # get an ECS connection and start a task.
        conn = get_connection(cluster_arn)

        # run the task
        # fire-and-forget: the run_task response is intentionally discarded;
        # AWS Step/stream infrastructure picks up the next event.
        conn.run_task(cluster=cluster_arn,
                      taskDefinition=task_definition,
                      overrides={
                          AWS_ECS.CONTAINER_OVERRIDES.KEY: [{
                              AWS_ECS.CONTAINER_OVERRIDES.CONTAINER_NAME:
                              container_name,
                              AWS_ECS.CONTAINER_OVERRIDES.ENVIRONMENT.KEY:
                              env
                          }]
                      })

        # entry actions do not return events
        return None