def to_typed_value_any_state(proto_msg):
    """Box *proto_msg* into a TypedValue via a google.protobuf.Any wrapper.

    The TypedValue carries the serialized Any itself (not the inner message),
    so its typename is pinned to the Any wrapper type rather than the
    message's own type URL.
    """
    packed = Any()
    packed.Pack(proto_msg)
    boxed = TypedValue()
    boxed.typename = "type.googleapis.com/google.protobuf.Any"
    boxed.value = packed.SerializeToString()
    return boxed
def to_typed_value(proto_msg):
    """Box *proto_msg* directly as a TypedValue.

    Unlike to_typed_value_any_state, the TypedValue's typename is the
    message's own Any type URL and its value is the raw serialized message,
    not a serialized Any wrapper.
    """
    wrapper = Any()
    wrapper.Pack(proto_msg)
    boxed = TypedValue()
    boxed.typename = wrapper.type_url
    boxed.value = wrapper.value
    return boxed
def kafka_egress_message(typename: str,
                         topic: str,
                         value: typing.Union[str, bytes, bytearray, int, float],
                         value_type: Type = None,
                         key: str = None):
    """
    Build a message that can be emitted to a Kafka generic egress.

    When a value_type is supplied, @value is serialized with that type's
    serializer. Otherwise a best-effort conversion to bytes is attempted for:
    utf-8 strings, bytes, bytearrays, ints and floats (the latter two encoded
    big-endian, matching Kafka's Integer/Double serializers).

    :param typename: the target egress to emit to (as defined in the module.yaml)
    :param topic: The Kafka destination topic for that record
    :param key: the utf8 encoded string key to produce (can be empty)
    :param value: the value to produce
    :param value_type: an optional hint to this value type.
    :return: A Protobuf message representing the record to be produced via the
             Kafka generic egress.
    """
    if not topic:
        raise ValueError("A destination Kafka topic is missing")
    if value is None:
        raise ValueError("Missing value")

    # Stage the payload bytes first, then assemble the record in one place.
    if value_type:
        payload = value_type.serializer().serialize(value)
    elif isinstance(value, str):
        payload = value.encode('utf-8')
    elif isinstance(value, (bytes, bytearray)):
        payload = bytes(value)
    elif isinstance(value, int):
        # big-endian 32-bit, per Kafka's IntegerSerializer:
        # https://docs.confluent.io/current/clients/javadocs/org/apache/kafka/common/serialization/IntegerSerializer.html
        payload = struct.pack('>i', value)
    elif isinstance(value, float):
        # big-endian 64-bit double, per Kafka's DoubleDeserializer:
        # https://docs.confluent.io/current/clients/javadocs/org/apache/kafka/common/serialization/DoubleDeserializer.html
        payload = struct.pack('>d', value)
    else:
        raise TypeError("Unable to convert value to bytes.")

    record = KafkaProducerRecord()
    record.topic = topic
    record.value_bytes = payload
    if key is not None:
        record.key = key

    boxed = TypedValue()
    boxed.typename = "type.googleapis.com/io.statefun.sdk.egress.KafkaProducerRecord"
    boxed.has_value = True
    boxed.value = record.SerializeToString()
    return EgressMessage(typename, boxed)
def kinesis_egress_message(typename: str,
                           stream: str,
                           value: typing.Union[str, bytes, bytearray],
                           partition_key: str,
                           value_type: typing.Union[None, Type] = None,
                           explicit_hash_key: str = None):
    """
    Build a message that can be emitted to a Kinesis generic egress.

    :param typename: the typename as specified in module.yaml
    :param stream: The AWS Kinesis destination stream for that record
    :param partition_key: the utf8 encoded string partition key to use
    :param value: the value to produce
    :param explicit_hash_key: a utf8 encoded string explicit hash key to use (can be empty)
    :param value_type: an optional hint to this value type
    :return: A Protobuf message representing the record to be produced to AWS Kinesis
             via the Kinesis generic egress.
    """
    if not stream:
        raise ValueError("Missing destination Kinesis stream")
    if value is None:
        raise ValueError("Missing value")
    if partition_key is None:
        raise ValueError("Missing partition key")

    # Stage the payload bytes, then assemble the record in one place.
    if value_type:
        payload = value_type.serializer().serialize(value)
    elif isinstance(value, str):
        payload = value.encode('utf-8')
    elif isinstance(value, (bytes, bytearray)):
        payload = bytes(value)
    else:
        raise TypeError("Unable to convert value to bytes.")

    record = KinesisEgressRecord()
    record.stream = stream
    record.value_bytes = payload
    record.partition_key = partition_key
    if explicit_hash_key is not None:
        record.explicit_hash_key = explicit_hash_key

    boxed = TypedValue()
    boxed.typename = "type.googleapis.com/io.statefun.sdk.egress.KinesisEgressRecord"
    boxed.has_value = True
    boxed.value = record.SerializeToString()
    return EgressMessage(typename, boxed)
def to_typed_value(type, value):
    """Serialize *value* under the given SDK *type* into a TypedValue.

    A value of None produces a TypedValue with has_value=False and no payload;
    otherwise the value is serialized with the type's own serializer.
    """
    boxed = TypedValue()
    boxed.typename = type.typename
    if value is None:
        boxed.has_value = False
        return boxed
    boxed.has_value = True
    boxed.value = type.serializer().serialize(value)
    return boxed
def _run_flink_loop(self, message_arg, target: Address, caller=None, egress_result=None):
    """Simulate one StateFun dispatch cycle against the handler, recursively.

    Builds a ToFunction envelope for *target*, invokes the module-level
    handler synchronously, then follows every outgoing message depth-first.
    Returns the first egress message produced anywhere in the chain (the one
    remembered earliest wins), or None if no egress was emitted.

    :param message_arg: either a ready TypedValue (function-to-function call)
                        or a protobuf message that still needs boxing.
    :param target: address of the function to invoke.
    :param caller: optional address of the calling function.
    :param egress_result: first egress seen so far in this recursion.
    """
    to_function = ToFunction()
    update_address(to_function.invocation.target, target.namespace, target.type, target.id)
    invocation = to_function.invocation.invocations.add()
    if caller:
        update_address(invocation.caller, caller.namespace, caller.type, caller.id)
    if isinstance(message_arg, TypedValue):
        # function calling function: argument is already boxed
        invocation.argument.CopyFrom(message_arg)
    else:
        flink_type = flink_value_type_for(message_arg)
        # ingress protobuf needs to be wrapped into a TypedValue
        invocation.argument.CopyFrom(
            TypedValue(typename=flink_type.typename,
                       has_value=True,
                       value=message_arg.SerializeToString()))
    # carry the harness's persisted state for this address into the envelope
    self._copy_state_to_invocation(target.namespace, target.type, target.id, to_function)
    # drive the async handler to completion on the current event loop
    result_bytes = asyncio.get_event_loop().run_until_complete(
        handler.handle_async(to_function.SerializeToString()))
    result = self._process_result(to_function, result_bytes)
    # remember first egress result
    if result.egress_message is not None and egress_result is None:
        egress_result = result.egress_message
    # recurse while we have outgoing messages
    outgoing_messages = result.outgoing_messages
    for outgoing_message in outgoing_messages:
        egress_value = self._run_flink_loop(
            message_arg=outgoing_message.argument,
            target=outgoing_message.target,
            caller=target,
            egress_result=egress_result)
        # NOTE(review): truthiness check — a falsy-but-present egress value
        # would be skipped here; presumably egress messages are always truthy.
        if egress_value:
            return egress_value
    return egress_result