"Type": "Wait", "Seconds":10, "Next": "EndState" } } }""" items = ['{"lambda":"Success"}'] if __name__ == '__main__': # Initialise logger logger = init_logging(log_name='error_handling1') # Initialising OpenTracing. It's important to do this before the boto3.client # call as create_tracer "patches" boto3 to add the OpenTracing hooks. create_tracer("error_handling1", {"implementation": "Jaeger"}) # Initialise the boto3 client setting the endpoint_url to our local # ASL Workflow Engine # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client sfn = boto3.client("stepfunctions", endpoint_url="http://localhost:4584") state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:error_handling_state_machine" def create_state_machines(): # Create state machine using a dummy roleArn. If it already exists an # exception will be thrown, we ignore that but raise other exceptions. try: response = sfn.create_state_machine( name="error_handling_state_machine", definition=ASL, roleArn="arn:aws:iam::0123456789:role/service-role/MyRole")
'{"lambda":"Success"}', '{"lambda":"InternalErrorNotHandled"}', '{"lambda":"InternalErrorHandled"}', '{"lambda":"Timeout"}' ] items = ['{"lambda":"Success"}'] #items = ['{"lambda":"InternalErrorNotHandled"}'] #items = ['{"lambda":"InternalErrorHandled"}'] #items = ['{"lambda":"Timeout"}'] if __name__ == '__main__': # Initialise logger logger = init_logging(log_name='step_by_step') # Initialising OpenTracing. It's important to do this before the boto3.client # call as create_tracer "patches" boto3 to add the OpenTracing hooks. create_tracer("step_by_step", {"implementation": "Jaeger"}) # Initialise the boto3 client setting the endpoint_url to our local # ASL Workflow Engine # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client sfn = boto3.client("stepfunctions", endpoint_url="http://localhost:4584") caller_state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:caller_state_machine" state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:simple_state_machine" def create_state_machines(): # Create state machine using a dummy roleArn. If it already exists an # exception will be thrown, we ignore that but raise other exceptions. try: response = sfn.create_state_machine( name="caller_state_machine",
            connection.close()

        """
        Note we use new_event_loop() not get_event_loop() here as we are
        running in a thread and by default there is no current event loop
        in threads.
        """
        loop = asyncio.new_event_loop()
        loop.run_until_complete(connect())
        loop.close()

if __name__ == '__main__':
    """
    Initialising OpenTracing here rather than in the Worker constructor as
    opentracing.tracer is a per-process object, not per-thread.
    """
    create_tracer("workers", {"implementation": "Jaeger"})

    workers = [
        "SuccessLambda",
        "TimeoutLambda",
        "InternalErrorHandledLambda",
        "InternalErrorNotHandledLambda",
        "mime-id",
    ]
    #workers = ["SuccessLambda"]

    for w in workers:
        worker = Worker(name=w)
        worker.start()
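
# Hedged, standalone illustration (not part of the original workers file): why
# the connect() coroutine above needs asyncio.new_event_loop(). A thread other
# than the main thread has no current event loop, so one must be created, used
# and closed explicitly. All names below are illustrative.
import asyncio
import threading

async def demo_coro():
    await asyncio.sleep(0.1)
    return "done"

def run_in_thread():
    # asyncio.get_event_loop() raises RuntimeError in a thread that has no
    # current event loop, hence new_event_loop() as in the Worker code above.
    loop = asyncio.new_event_loop()
    try:
        print("Worker thread result:", loop.run_until_complete(demo_coro()))
    finally:
        loop.close()

threading.Thread(target=run_in_thread).start()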
} } ] } } }""" items = ['{"lambda":"Success"}'] if __name__ == '__main__': # Initialise logger logger = init_logging(log_name="parallel2") # Initialising OpenTracing. It's important to do this before the boto3.client # call as create_tracer "patches" boto3 to add the OpenTracing hooks. create_tracer("parallel2", {"implementation": "Jaeger"}) # Initialise the boto3 client setting the endpoint_url to our local # ASL Workflow Engine # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client sfn = boto3.client("stepfunctions", endpoint_url="http://localhost:4584") state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:parallel2" def create_state_machines(): # Create state machine using a dummy roleArn. If it already exists an # exception will be thrown, we ignore that but raise other exceptions. try: response = sfn.create_state_machine( name="parallel2", definition=ASL, roleArn="arn:aws:iam::0123456789:role/service-role/MyRole")
"Type": "Pass", "End": true } } }""" items = ['[{"category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95}, {"category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99}, {"category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99}, {"category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99}]'] if __name__ == '__main__': # Initialise logger logger = init_logging(log_name='iterate1') # Initialising OpenTracing. It's important to do this before the boto3.client # call as create_tracer "patches" boto3 to add the OpenTracing hooks. create_tracer("iterate1", {"implementation": "Jaeger"}) # Initialise the boto3 client setting the endpoint_url to our local # ASL Workflow Engine # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client sfn = boto3.client("stepfunctions", endpoint_url="http://localhost:4584") iterate1_state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:iterate1_state_machine" child_state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:child_state_machine" def create_state_machines(): # Create state machines using a dummy roleArn. If it already exists an # exception will be thrown, we ignore that but raise other exceptions. try: response = sfn.create_state_machine( name="iterate1_state_machine", definition=iterate1_ASL, roleArn="arn:aws:iam::0123456789:role/service-role/MyRole"
} }""" items = [""" { "items": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] } """] if __name__ == '__main__': # Initialise logger logger = init_logging(log_name="map2") # Initialising OpenTracing. It's important to do this before the boto3.client # call as create_tracer "patches" boto3 to add the OpenTracing hooks. create_tracer("map2", {"implementation": "Jaeger"}) # Initialise the boto3 client setting the endpoint_url to our local # ASL Workflow Engine # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client sfn = boto3.client("stepfunctions", endpoint_url="http://localhost:4584") state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:map2" def create_state_machines(): # Create state machine using a dummy roleArn. If it already exists an # exception will be thrown, we ignore that but raise other exceptions. try: response = sfn.create_state_machine( name="map2", definition=ASL, roleArn="arn:aws:iam::0123456789:role/service-role/MyRole" )
'{"lambda":"Success"}', '{"lambda":"InternalErrorNotHandled"}', '{"lambda":"InternalErrorHandled"}', '{"lambda":"Timeout"}' ] #items = ['{"lambda":"Success"}'] #items = ['{"lambda":"InternalErrorNotHandled"}'] #items = ['{"lambda":"InternalErrorHandled"}'] #items = ['{"lambda":"Timeout"}'] if __name__ == '__main__': # Initialise logger logger = init_logging(log_name='simple_state_machine2') # Initialising OpenTracing. It's important to do this before the boto3.client # call as create_tracer "patches" boto3 to add the OpenTracing hooks. create_tracer("simple_state_machine2", {"implementation": "Jaeger"}) # Initialise the boto3 client setting the endpoint_url to our local # ASL Workflow Engine # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client sfn = boto3.client("stepfunctions", endpoint_url="http://localhost:4584") state_machine_arn = "arn:aws:states:local:0123456789:stateMachine:simple_state_machine" def create_state_machines(): # Create state machine using a dummy roleArn. If it already exists an # exception will be thrown, we ignore that but raise other exceptions. try: response = sfn.create_state_machine( name="simple_state_machine", definition=ASL, roleArn="arn:aws:iam::0123456789:role/service-role/MyRole")
    def __init__(self, configuration_file):
        """
        :param configuration_file: Path to coordinator configuration file
        :type configuration_file: str
        :raises IOError: If configuration file does not exist, or is not readable
        :raises ValueError: If configuration file does not contain valid JSON
        :raises AssertionError: If configuration file does not contain the
                                required fields
        """
        # Initialise logger
        self.logger = init_logging(log_name="asl_workflow_engine")

        # Load the configuration file.
        try:
            with open(configuration_file, "r") as fp:
                config = json.load(fp)
            self.logger.info("Creating WorkflowEngine")
        except IOError as e:
            self.logger.error("Unable to read configuration file: {}".format(
                configuration_file))
            raise
        except ValueError as e:
            self.logger.error("Configuration file does not contain valid JSON")
            raise

        # Provide defaults for any unset config key
        config["event_queue"] = config.get("event_queue", {})
        config["notifier"] = config.get("notifier", {})
        config["state_engine"] = config.get("state_engine", {})
        config["rest_api"] = config.get("rest_api", {})
        config["tracer"] = config.get("tracer", {})
        config["metrics"] = config.get("metrics", {})

        """
        Override config values if a field is set as an environment variable.
        There is also a USE_STRUCTURED_LOGGING environment variable used by
        the logger to select between automation friendly structured logging
        or more human readable "traditional" logs.
        """
        eq = config["event_queue"]
        eq["queue_name"] = os.environ.get("EVENT_QUEUE_QUEUE_NAME",
                                          eq.get("queue_name"))
        eq["instance_id"] = os.environ.get("EVENT_QUEUE_INSTANCE_ID",
                                           eq.get("instance_id"))
        eq["queue_type"] = os.environ.get("EVENT_QUEUE_QUEUE_TYPE",
                                          eq.get("queue_type"))
        eq["connection_url"] = os.environ.get("EVENT_QUEUE_CONNECTION_URL",
                                              eq.get("connection_url"))
        eq["connection_options"] = os.environ.get(
            "EVENT_QUEUE_CONNECTION_OPTIONS", eq.get("connection_options"))
        eq["shared_event_consumer_capacity"] = os.environ.get(
            "EVENT_QUEUE_SHARED_EVENT_CONSUMER_CAPACITY",
            eq.get("shared_event_consumer_capacity"))
        eq["instance_event_consumer_capacity"] = os.environ.get(
            "EVENT_QUEUE_INSTANCE_EVENT_CONSUMER_CAPACITY",
            eq.get("instance_event_consumer_capacity"))
        eq["reply_to_consumer_capacity"] = os.environ.get(
            "EVENT_QUEUE_REPLY_TO_CONSUMER_CAPACITY",
            eq.get("reply_to_consumer_capacity"))

        no = config["notifier"]
        no["topic"] = os.environ.get("NOTIFIER_TOPIC", no.get("topic"))

        se = config["state_engine"]
        se["store_url"] = os.environ.get("STATE_ENGINE_STORE_URL",
                                         se.get("store_url"))
        se["execution_ttl"] = os.environ.get("STATE_ENGINE_EXECUTION_TTL",
                                             se.get("execution_ttl", 86400))

        ra = config["rest_api"]
        ra["host"] = os.environ.get("REST_API_HOST", ra.get("host"))
        ra["port"] = int(os.environ.get("REST_API_PORT", ra.get("port")))
        ra["region"] = os.environ.get("REST_API_REGION", ra.get("region"))

        tr = config["tracer"]
        tr["implementation"] = os.environ.get("TRACER_IMPLEMENTATION",
                                              tr.get("implementation", "None"))
        # The Jaeger specific env vars are derived from this document:
        # https://www.jaegertracing.io/docs/1.22/client-features/
        sampler = tr["config"]["sampler"]
        sampler["type"] = os.environ.get("JAEGER_SAMPLER_TYPE",
                                         sampler.get("type"))
        sampler["param"] = os.environ.get("JAEGER_SAMPLER_PARAM",
                                          sampler.get("param"))

        metrics = config["metrics"]
        metrics["implementation"] = os.environ.get(
            "METRICS_IMPLEMENTATION", metrics.get("implementation", "None"))
        metrics["namespace"] = os.environ.get("METRICS_NAMESPACE",
                                              metrics.get("namespace", ""))

        """
        Initialise opentracing.tracer before creating the StateEngine,
        EventDispatcher and RestAPI instances. Call asyncio.get_event_loop()
        here, because if we are using asyncio we want the tracer to use the
        main asyncio event loop rather than create a new ThreadLoop, which is
        the default behaviour unless a tornado IOLoop is passed. In recent
        versions of Tornado that delegates to the asyncio loop.
        """
        if eq["queue_type"].endswith("-asyncio"):
            # Attempt to use uvloop libuv based event loop if available
            # https://github.com/MagicStack/uvloop
            try:
                import uvloop
                uvloop.install()
                self.logger.info("Using uvloop asyncio event loop")
            except:
                # Fall back to standard library asyncio epoll event loop
                self.logger.info("Using standard library asyncio event loop")
            loop = asyncio.get_event_loop()
            create_tracer("asl_workflow_engine", config["tracer"],
                          use_asyncio=True)
        else:
            create_tracer("asl_workflow_engine", config["tracer"])

        self.state_engine = StateEngine(config)
        self.event_dispatcher = EventDispatcher(self.state_engine, config)
        self.config = config
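
# Hedged sketch (an assumption, not a file shipped with the engine): a minimal
# configuration covering every section the constructor above reads. Only the
# key names are taken from the code; all values are illustrative placeholders.
# The tracer section includes the nested config.sampler object that the Jaeger
# overrides expect. Serialised with json.dump, this could be passed to the
# constructor as configuration_file.
EXAMPLE_CONFIG = {
    "event_queue": {
        "queue_name": "asl_workflow_events",
        "instance_id": "0",
        "queue_type": "AMQP-0.9.1-asyncio",  # "-asyncio" suffix selects the asyncio path
        "connection_url": "amqp://localhost:5672"
    },
    "notifier": {"topic": "asl_workflow_notifications"},
    "state_engine": {"store_url": "redis://localhost:6379", "execution_ttl": 86400},
    "rest_api": {"host": "0.0.0.0", "port": 4584, "region": "local"},
    "tracer": {
        "implementation": "Jaeger",
        "config": {"sampler": {"type": "const", "param": 1}}
    },
    "metrics": {"implementation": "None", "namespace": ""}
}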