def to_message(
    self,
    key: K,
    value: V,
    *,
    partition: Optional[int] = None,
    offset: int = 0,
    timestamp: Optional[float] = None,
    timestamp_type: int = 0,
    headers: Optional[HeadersArg] = None,
) -> Message:
    # Wrap key/value in a Message; fall back to '<internal>' when the
    # channel has no resolvable topic name.
    try:
        topic_name = self._get_strtopic(self.original_channel)
    except ValueError:
        topic_name = "<internal>"
    return Message(
        topic=topic_name,
        partition=partition or 0,
        offset=offset,
        timestamp=timestamp or time(),
        timestamp_type=timestamp_type,
        headers=headers,
        key=key,
        value=value,
        checksum=b"",
        serialized_key_size=0,
        serialized_value_size=0,
    )
async def send(
    self,
    topic: str,
    key: Optional[bytes],
    value: Optional[bytes],
    partition: Optional[int],
    timestamp: Optional[float],
    headers: Optional[HeadersArg],
) -> RecordMetadata:
    """Deliver message to consumer."""
    if partition is None:
        partition = 0
    message = Message(
        topic,
        partition=partition,
        offset=0,
        timestamp=timestamp or time(),
        timestamp_type=1 if timestamp else 0,
        headers=headers,
        key=key,
        value=value,
        checksum=None,
        serialized_key_size=len(key) if key else 0,
        serialized_value_size=len(value) if value else 0,
    )
    self._messages[topic].append(message)
    self._messages_ready.set()
    return RecordMetadata(
        topic=topic,
        partition=partition,
        topic_partition=message.tp,
        offset=0,
    )
async def send(
    self,
    topic: str,
    key: Optional[bytes],
    value: Optional[bytes],
    partition: Optional[int],
) -> RecordMetadata:
    """Deliver message to consumer (variant without timestamp/headers)."""
    if partition is None:
        partition = 0
    message = Message(
        topic,
        partition=partition,
        offset=0,
        timestamp=time(),
        timestamp_type=0,  # 0 = create time; must be an int, not a string
        key=key,
        value=value,
        checksum=None,
        serialized_key_size=len(key) if key else 0,
        serialized_value_size=len(value) if value else 0,
    )
    self._messages[topic].append(message)
    self._messages_ready.set()
    return RecordMetadata(
        topic=topic,
        partition=partition,
        topic_partition=message.tp,
        offset=0,
    )
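# Both send() variants above follow the same handshake: the produced Message is
# appended to a per-topic buffer and an asyncio.Event is set so that a waiting
# consumer wakes up.  Below is a minimal, self-contained sketch of that pattern;
# the InMemoryBroker class and its method names are illustrative, not the
# library's actual transport API.

import asyncio
from collections import defaultdict
from typing import Dict, List


class InMemoryBroker:
    """Illustrative stand-in for the in-memory transport used above."""

    def __init__(self) -> None:
        self._messages: Dict[str, List[bytes]] = defaultdict(list)
        self._messages_ready = asyncio.Event()

    async def send(self, topic: str, value: bytes) -> None:
        # Producer side: buffer the message and wake any waiting consumer.
        self._messages[topic].append(value)
        self._messages_ready.set()

    async def drain(self, topic: str) -> List[bytes]:
        # Consumer side: wait until at least one message is ready,
        # then take everything buffered for the topic.
        await self._messages_ready.wait()
        self._messages_ready.clear()
        messages, self._messages[topic] = self._messages[topic], []
        return messages


async def _demo() -> None:
    broker = InMemoryBroker()
    await broker.send('orders', b'order-1')
    print(await broker.drain('orders'))  # [b'order-1']


asyncio.run(_demo())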
def on_message_in(self, tp: TP, offset: int, message: Message) -> None:
    # WARNING: Sensors must never keep a reference to the Message,
    # as this means the message won't go out of scope!
    self.messages_received_total += 1
    self.messages_active += 1
    self.messages_received_by_topic[tp.topic] += 1
    message.time_in = self.time()
def on_message_out(self, tp: TP, offset: int, message: Message) -> None:
    self.messages_active -= 1
    time_out = message.time_out = self.time()
    time_in = message.time_in
    if time_in is not None:
        message.time_total = time_out - time_in
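# Taken together, on_message_in and on_message_out give per-message latency:
# time_in is stamped when the message enters processing and time_total is
# computed when it leaves.  The standalone sketch below mirrors that
# bookkeeping with a plain object in place of the real Message type; the
# LatencySensor class and names are illustrative only.

import time
from types import SimpleNamespace


class LatencySensor:
    """Minimal sensor-style hooks mirroring the on_message_in/out pair above."""

    def __init__(self) -> None:
        self.messages_active = 0

    def time(self) -> float:
        return time.monotonic()

    def on_message_in(self, message) -> None:
        self.messages_active += 1
        message.time_in = self.time()

    def on_message_out(self, message) -> None:
        self.messages_active -= 1
        time_out = message.time_out = self.time()
        if message.time_in is not None:
            message.time_total = time_out - message.time_in


sensor = LatencySensor()
msg = SimpleNamespace(time_in=None, time_out=None, time_total=None)
sensor.on_message_in(msg)
time.sleep(0.01)            # simulate processing work
sensor.on_message_out(msg)
print(f"processed in {msg.time_total:.4f}s")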
def event():
    message = Message(
        topic='test-topic',
        key='key',
        value='value',
        partition=3,
        offset=0,
        checksum=None,
        timestamp=datetime.datetime.now().timestamp(),
        timestamp_type=0,
    )
    return Event(app='test-app', key='key', value='value', message=message)
def event():
    message = Message(
        topic="test-topic",
        key="key",
        value="value",
        partition=3,
        offset=0,
        checksum=None,
        timestamp=datetime.datetime.now().timestamp(),
        timestamp_type=0,
        headers={},
    )
    return Event(
        app="test-app",
        key="key",
        value="value",
        headers={},
        message=message,
    )
def ack(self, message: Message) -> bool:
    if not message.acked:
        message.acked = True
        tp = message.tp
        offset = message.offset
        if self.app.topics.acks_enabled_for(message.topic):
            committed = self._committed_offset[tp]
            try:
                if committed is None or offset > committed:
                    acked_index = self._acked_index[tp]
                    if offset not in acked_index:
                        self._unacked_messages.discard(message)
                        acked_index.add(offset)
                        acked_for_tp = self._acked[tp]
                        acked_for_tp.append(offset)
                        self._n_acked += 1
                        return True
            finally:
                notify(self._waiting_for_ack)
    return False
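# The acked offsets collected above cannot be committed blindly: messages may
# be acknowledged out of order, so only the longest contiguous run above the
# last committed offset is safe to commit.  The helper below is a hedged
# sketch of that flattening step, not the library's actual commit logic.

from typing import Optional, Set


def highest_committable(acked: Set[int], committed: Optional[int]) -> Optional[int]:
    # Walk forward from the last committed offset and stop at the first gap;
    # everything before the gap is safe to commit.
    next_offset = 0 if committed is None else committed + 1
    new_committed = committed
    while next_offset in acked:
        new_committed = next_offset
        next_offset += 1
    return new_committed


# Offsets 3 and 4 were acked out of order while 2 is still unacked,
# so only offset 1 can be committed.
print(highest_committable({0, 1, 3, 4}, committed=None))  # -> 1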
async def on_message(message: Message) -> None:
    # when a message is received we find all channels
    # that subscribe to this message
    await acquire_flow_control()
    channels_n = len_(channels)
    if channels_n:
        # we increment the reference count for this message in bulk
        # immediately, so that nothing will get a chance to decref to
        # zero before we've had the chance to pass it to all channels
        message.incref(channels_n)
        event: Optional[EventT] = None
        event_keyid: Optional[Tuple[K, V]] = None

        # forward message to all channels subscribing to this topic

        # keep track of the number of channels we delivered to,
        # so that if a DecodeError is raised we can propagate
        # those errors to the remaining channels.
        delivered: Set[_Topic] = set()
        full: List[Tuple[EventT, _Topic]] = []
        try:
            for chan in channels:
                keyid = chan.key_type, chan.value_type
                if event is None:
                    # first channel deserializes the payload:
                    event = await chan.decode(message, propagate=True)
                    event_keyid = keyid

                    queue = chan.queue
                    if queue.full():
                        full.append((event, chan))
                        continue
                    queue.put_nowait(event)
                else:
                    # subsequent channels may have a different
                    # key/value type pair, meaning they all can
                    # deserialize the message in different ways
                    dest_event: EventT
                    if keyid == event_keyid:
                        # Reuse the event if it uses the same keypair:
                        dest_event = event
                    else:
                        dest_event = await chan.decode(message, propagate=True)
                    queue = chan.queue
                    if queue.full():
                        full.append((dest_event, chan))
                        continue
                    queue.put_nowait(dest_event)
                delivered.add(chan)
            if full:
                for _, dest_chan in full:
                    on_topic_buffer_full(dest_chan)
                await asyncio.wait(
                    [dest_chan.put(dest_event) for dest_event, dest_chan in full],
                    return_when=asyncio.ALL_COMPLETED,
                )
        except KeyDecodeError as exc:
            remaining = channels - delivered
            message.ack(app.consumer, n=len(remaining))
            for channel in remaining:
                await channel.on_key_decode_error(exc, message)
                delivered.add(channel)
        except ValueDecodeError as exc:
            remaining = channels - delivered
            message.ack(app.consumer, n=len(remaining))
            for channel in remaining:
                await channel.on_value_decode_error(exc, message)
                delivered.add(channel)
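# The bulk incref at the top of on_message exists because one consumed message
# may be delivered to several channels: it has to stay referenced (and unacked)
# until every channel has finished with its copy.  Below is a compact,
# standalone sketch of that reference-counting idea; the RefcountedMessage
# class and its names are illustrative, not the library's implementation.


class RefcountedMessage:
    """Message that is only acked once every subscriber has released it."""

    def __init__(self, value: bytes) -> None:
        self.value = value
        self.refcount = 0
        self.acked = False

    def incref(self, n: int = 1) -> None:
        # Taken in bulk before fan-out so the count can never hit zero early.
        self.refcount += n

    def decref(self, n: int = 1) -> None:
        self.refcount -= n
        if self.refcount <= 0 and not self.acked:
            self.acked = True  # all subscribers done: safe to ack/commit


msg = RefcountedMessage(b'payload')
subscribers = 3
msg.incref(subscribers)        # bulk incref, as in on_message above
for _ in range(subscribers):
    msg.decref()               # each channel releases its reference
print(msg.acked)               # True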