def test_task_entry_serialisation(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = (1, '2', Address(namespace='test'))
        kwargs = {'arg': [1, 2, 3]}
        parameters = {'is_fruitful': True}

        entry = Task.from_fields('task_id',
                                 'task_type',
                                 args,
                                 kwargs,
                                 **parameters,
                                 is_finally=True)
        entry.mark_complete()

        entry_proto = entry.to_proto(serialiser)
        entry_proto = entry.to_proto(serialiser)
        reconstituted_entry = Task.from_proto(entry_proto).unpack(serialiser)

        self.assertEqual(entry_proto.task_entry.request.type_url,
                         'type.googleapis.com/statefun_tasks.ArgsAndKwargs')
        self.assertEqual(reconstituted_entry.task_id, entry.task_id)
        self.assertEqual(reconstituted_entry.task_type, entry.task_type)
        self.assertEqual(reconstituted_entry.is_fruitful, True)
        self.assertEqual(reconstituted_entry.is_finally, True)
        self.assertEqual(reconstituted_entry.is_complete(), True)
        self.assertEqual(reconstituted_entry.to_tuple(), entry.to_tuple())
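The 'type.googleapis.com/...' type_url asserted above is standard protobuf Any behaviour rather than anything statefun_tasks-specific: packing a message into an Any prefixes its full message name with the default type URL host. A minimal standalone sketch using a well-known wrapper type (not part of the library under test):

from google.protobuf.any_pb2 import Any
from google.protobuf.wrappers_pb2 import StringValue

any_proto = Any()
any_proto.Pack(StringValue(value='hello'))  # records the full message name in type_url

assert any_proto.type_url == 'type.googleapis.com/google.protobuf.StringValue'

unpacked = StringValue()
assert any_proto.Unpack(unpacked)  # round-trips back to the concrete message
assert unpacked.value == 'hello'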
Example #2
    def __init__(self,
                 kafka_broker_url,
                 request_topic,
                 reply_topic,
                 group_id=None,
                 serialiser=None):
        self._kafka_broker_url = kafka_broker_url
        self._requests = {}

        self._request_topic = request_topic
        self._reply_topic = reply_topic
        self._group_id = group_id
        self._serialiser = serialiser if serialiser is not None else DefaultSerialiser()

        self._producer = KafkaProducer(bootstrap_servers=[kafka_broker_url])

        self._consumer = KafkaConsumer(
            self._reply_topic,
            bootstrap_servers=[self._kafka_broker_url],
            auto_offset_reset='earliest',
            group_id=self._group_id)

        self._consumer_thread = Thread(target=self._consume, daemon=True)
        self._consumer_thread.start()
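The constructor above sets up a fire-and-forget producer plus a background reply consumer on a daemon thread. A standalone sketch of the same kafka-python pattern (broker address and topic names are placeholders):

from threading import Thread
from kafka import KafkaProducer, KafkaConsumer

producer = KafkaProducer(bootstrap_servers=['localhost:9092'])
producer.send('statefun-task-requests', value=b'serialised-task-request')
producer.flush()  # block until the record has actually been sent

def consume_replies():
    consumer = KafkaConsumer('statefun-task-replies',
                             bootstrap_servers=['localhost:9092'],
                             auto_offset_reset='earliest')
    for record in consumer:  # blocks indefinitely, hence the daemon thread
        print(record.value)

# daemon=True mirrors the constructor: the thread won't keep the process alive
Thread(target=consume_replies, daemon=True).start()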
    def test_pipeline_using_kwargs(self):
        pipeline = tasks.send(hello_workflow, first_name='Jane', last_name='Doe')
        proto = pipeline.to_proto(serialiser=DefaultSerialiser())
        self.assertEqual(proto.entries[0].task_entry.request.type_url, 'type.googleapis.com/statefun_tasks.ArgsAndKwargs')

        result = self.test_harness.run_pipeline(pipeline)
        self.assertEqual(result, 'Hello Jane Doe')

    def test_group_entry_serialisation(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = (1, '2', Address(namespace='test'))
        kwargs = {'arg': [1, 2, 3]}

        group_entry = Group(group_id='inner_group_id', max_parallelism=10)

        group_entry.add_to_group([
            Task.from_fields('inner_task_id_1', 'task_type', args, kwargs),
            Task.from_fields('inner_task_id_2', 'task_type', args, kwargs)
        ])

        entry = Group(group_id='group_id')
        entry.add_to_group([
            group_entry,
            Task.from_fields('grouped_task_chain_1_1', 'task_type', args,
                             kwargs),
            Task.from_fields('grouped_task_chain_1_2', 'task_type', args,
                             kwargs)
        ])

        entry.add_to_group([
            Task.from_fields('grouped_task_chain_2_1', 'task_type', args,
                             kwargs)
        ])

        proto = entry.to_proto(serialiser)
        reconstituted_entry = Group.from_proto(proto)
        self.assertEqual(str(reconstituted_entry), str(entry))

    def test_simple_protobuf_pipeline(self):
        pipeline = tasks.send(simple_protobuf_workflow, TestPerson(first_name='Jane', last_name='Doe'))
        proto = pipeline.to_proto(serialiser=DefaultSerialiser())

        self.assertEqual(proto.entries[0].task_entry.request.type_url, 'type.googleapis.com/tests.TestPerson')

        result = self.test_harness.run_pipeline(pipeline)
        self.assertEqual(result.greeting, 'Hello Jane Doe')

    def test_task_entry_serialisation_with_single_protobuf_arg(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = Address(namespace='test')
        entry = Task.from_fields('task_id', 'task_type', args, {}, is_finally=True)

        entry_proto = entry.to_proto(serialiser)
        reconstituted_entry = Task.from_proto(entry_proto).unpack(serialiser)

        self.assertEqual(
            entry_proto.task_entry.request.type_url,
            'type.googleapis.com/io.statefun.sdk.reqreply.Address')
        self.assertEqual(reconstituted_entry.to_tuple(), entry.to_tuple())
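The type_url assertion above shows the single-protobuf fast path: a lone protobuf argument is packed directly into the request rather than being wrapped in ArgsAndKwargs. The same packing with plain protobuf, reusing the Address message from this test:

from google.protobuf.any_pb2 import Any

any_proto = Any()
any_proto.Pack(Address(namespace='test'))
# the Any records Address's full message name, exactly as asserted in the test
assert any_proto.type_url == 'type.googleapis.com/io.statefun.sdk.reqreply.Address'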
    def test_task_entry_serialisation_with_single_protobuf_arg(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = Address(namespace='test')
        entry = _TaskEntry('task_id', 'task_type', args, {}, {}, True)

        entry_proto = entry.to_proto(serialiser)
        reconstituted_entry = _TaskEntry.from_proto(entry_proto, serialiser)

        self.assertEqual(
            entry_proto.task_entry.request.type_url,
            'type.googleapis.com/org.apache.flink.statefun.flink.core.polyglot.Address'
        )

        self.assertEqual(reconstituted_entry.args, entry.args)
        self.assertEqual(reconstituted_entry.kwargs, {})

    def test_task_entry_serialisation_with_task_retry_policy(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = ()
        kwargs = {}
        parameters = {
            'retry_policy': RetryPolicy(retry_for=[Exception, ValueError]).to_proto()
        }

        entry = _TaskEntry('task_id', 'task_type', args, kwargs, parameters)

        entry_proto = entry.to_proto(serialiser)
        reconstituted_entry = _TaskEntry.from_proto(entry_proto, serialiser)
        retry_policy = reconstituted_entry.get_parameter('retry_policy')
        self.assertEqual(['builtins.Exception', 'builtins.ValueError'],
                         retry_policy.retry_for)
Example #9
    def __init__(self,
                 kafka_broker_url,
                 request_topics,
                 action_topics,
                 reply_topic,
                 group_id=None,
                 serialiser=None,
                 kafka_properties=None,
                 kafka_consumer_properties=None,
                 kafka_producer_properties=None):

        self._kafka_broker_url = kafka_broker_url
        self._requests = {}

        self._request_topics = request_topics
        self._action_topics = action_topics
        self._reply_topic = reply_topic
        self._group_id = group_id
        self._serialiser = serialiser if serialiser is not None else DefaultSerialiser()

        kafka_properties = kafka_properties or {}
        kafka_consumer_properties = kafka_consumer_properties or {}
        kafka_producer_properties = kafka_producer_properties or {}

        if isinstance(kafka_broker_url, str):
            bootstrap_servers = [kafka_broker_url]
        else:
            bootstrap_servers = kafka_broker_url

        producer_properties = {**kafka_properties, **kafka_producer_properties}
        self._producer = KafkaProducer(bootstrap_servers=bootstrap_servers,
                                       **producer_properties)

        consumer_properties = {**kafka_properties, **kafka_consumer_properties}
        self._consumer = KafkaConsumer(self._reply_topic,
                                       bootstrap_servers=bootstrap_servers,
                                       auto_offset_reset='earliest',
                                       group_id=self._group_id,
                                       **consumer_properties)

        self._consumer_thread = Thread(target=self._consume, daemon=True)
        self._consumer_thread.start()
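Note the merge order used for the Kafka client properties above: the role-specific dict is unpacked after the shared kafka_properties, and in a dict literal later keys win. A quick demonstration of that rule:

kafka_properties = {'security_protocol': 'SSL', 'request_timeout_ms': 30000}
kafka_consumer_properties = {'request_timeout_ms': 60000}

merged = {**kafka_properties, **kafka_consumer_properties}
assert merged == {'security_protocol': 'SSL', 'request_timeout_ms': 60000}  # consumer override wins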
    def test_task_entry_serialisation_with_task_retry_policy(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = ()
        kwargs = {}
        retry_policy = RetryPolicy(retry_for=[Exception, ValueError]).to_proto()

        entry = Task.from_fields('task_id',
                                 'task_type',
                                 args,
                                 kwargs,
                                 retry_policy=retry_policy)

        entry_proto = entry.to_proto(serialiser)
        reconstituted_entry = Task.from_proto(entry_proto).unpack(serialiser)
        retry_policy = reconstituted_entry.retry_policy
        self.assertEqual(['builtins.Exception', 'builtins.ValueError'],
                         retry_policy.retry_for)
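The 'builtins.Exception' and 'builtins.ValueError' strings asserted above are just the qualified class names, and a worker honouring such a policy amounts to a guarded retry loop. A hypothetical sketch (run_with_retries is illustrative, not the statefun_tasks implementation):

# the qualified names follow from module + class name
assert f'{ValueError.__module__}.{ValueError.__name__}' == 'builtins.ValueError'

def run_with_retries(fn, retry_for=(Exception,), max_retries=3):
    """Illustrative only: retry fn for the exception types named in the policy."""
    for attempt in range(max_retries + 1):
        try:
            return fn()
        except retry_for:  # only the listed exception types are retried
            if attempt == max_retries:
                raise  # retries exhausted; propagate the last error

run_with_retries(lambda: print('ok'), retry_for=(ValueError,))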
    def test_task_entry_serialisation(self):
        serialiser = DefaultSerialiser(known_proto_types=[Address])

        args = (1, '2', Address(namespace='test'))
        kwargs = {'arg': [1, 2, 3]}
        parameters = {'a_parameter': 'some_value'}

        entry = _TaskEntry('task_id', 'task_type', args, kwargs, parameters,
                           True)
        entry.mark_complete()

        entry_proto = entry.to_proto(serialiser)
        reconstituted_entry = _TaskEntry.from_proto(entry_proto, serialiser)

        self.assertEqual(reconstituted_entry.task_id, entry.task_id)
        self.assertEqual(reconstituted_entry.task_type, entry.task_type)
        self.assertEqual(reconstituted_entry.args, tuple(entry.args))
        self.assertEqual(reconstituted_entry.kwargs, kwargs)
        self.assertEqual(reconstituted_entry.parameters, parameters)
        self.assertEqual(reconstituted_entry.is_finally, True)
        self.assertEqual(reconstituted_entry.is_complete(), True)
Example #12
from statefun_tasks import FlinkTasks, DefaultSerialiser
from statefun_tasks.client import TaskError
from statefun_tasks.protobuf import unpack_any

from statefun import RequestReplyHandler, StatefulFunctions
from statefun.kafka_egress_pb2 import KafkaProducerRecord
from statefun.request_reply_pb2 import FromFunction, ToFunction, Address, TypedValue

import asyncio
from typing import Union, Optional, List, NamedTuple
from google.protobuf.any_pb2 import Any

from .test_utils import update_address, update_state

default_namespace = 'test'
default_worker_name = 'worker'
test_harness_serialiser = DefaultSerialiser()

tasks = FlinkTasks(
    default_namespace=default_namespace,
    default_worker_name=default_worker_name,
    egress_type_name=f'{default_namespace}/kafka-generic-egress',
    serialiser=test_harness_serialiser)

other_tasks_instance = FlinkTasks(
    default_namespace=default_namespace,
    default_worker_name=default_worker_name,
    egress_type_name=f'{default_namespace}/kafka-generic-egress',
    serialiser=test_harness_serialiser)

functions = StatefulFunctions()
Example #13
import asyncio
from typing import Union, Optional, List, NamedTuple

from google.protobuf.any_pb2 import Any
from statefun import StatefulFunctions, AsyncRequestReplyHandler
from statefun.kafka_egress_pb2 import KafkaProducerRecord
from statefun.request_reply_pb2 import FromFunction, ToFunction, Address

from statefun_tasks import (TaskRequest, TaskResult, TaskException, TaskActionRequest,
                            TaskActionResult, TaskActionException, TaskAction,
                            PipelineBuilder, FlinkTasks, DefaultSerialiser)
from statefun_tasks.client import TaskError
from ._test_utils import update_address, update_state, unpack_any

default_namespace = 'test'
default_worker_name = 'worker'
serialiser = DefaultSerialiser()
tasks = FlinkTasks(
    default_namespace=default_namespace,
    default_worker_name=default_worker_name,
    egress_type_name=f'{default_namespace}/kafka-generic-egress',
    serialiser=serialiser)

functions = StatefulFunctions()


@functions.bind('test/worker')
async def worker(context, task_data: Union[TaskRequest, TaskResult,
                                           TaskException, TaskActionRequest]):
    if tasks.is_async_required(task_data):
        await tasks.run_async(context, task_data)
    else: