def decode_message_from_avro(self, message: Message):
    """Decode an Avro-encoded Kafka message into key/value parts.

    Returns a 4-tuple ``(key_schema_id, value_schema_id, decoded_message,
    serializable_message)`` where *serializable_message* is a plain dict
    suitable for JSON/disk serialization.
    """
    key_schema_id, decoded_key = self.decode_bytes(message.key())
    value_schema_id, decoded_value = self.decode_bytes(message.value())

    headers = []
    if message.headers():
        for header_key, header_value in message.headers():
            headers.append(
                MessageHeader(
                    key=header_key,
                    value=header_value.decode("utf-8") if header_value else None,
                )
            )

    decoded_message = DecodedAvroMessage(
        decoded_key,
        decoded_value,
        message.partition(),
        key_schema_id,
        value_schema_id,
        headers=headers,
    )

    # BUG FIX (dead code): the original built a fresh ``itertools.count(1)``
    # here and called ``next()`` on it exactly once per invocation, so the
    # prefix was always "0001".  The useless counter is removed; the
    # observable value is unchanged.
    # NOTE(review): if a per-call incrementing version number was the original
    # intent, the counter must live outside this method — confirm with callers.
    serializable_message = {
        "key": decoded_message.key,
        "value": decoded_message.value,
        "partition": decoded_message.partition,
        "schema_directory_name": f"0001_{key_schema_id}_{value_schema_id}",
    }
    return key_schema_id, value_schema_id, decoded_message, serializable_message
def _process_message(self, msg: KafkaMessage):
    """Validate a raw consumer record before it is dispatched.

    Returns the message when it carries a non-empty value (with headers
    normalized to a list), ``None`` for end-of-partition markers or empty
    payloads, and raises ``KafkaException`` for real broker errors after
    incrementing the monitoring counter and logging a warning.
    """
    error = msg.error()
    if error:
        # End-of-partition is an expected condition, not a failure.
        if error.code() == KafkaError._PARTITION_EOF:
            return None
        monitoring.got_counter("kafka_consumer_exception")
        log_params = {
            "code": error.code(),
            "pid": os.getpid(),
            "topic": msg.topic(),
            "partition": msg.partition(),
            "offset": msg.offset(),
            log_const.KEY_NAME: log_const.EXCEPTION_VALUE,
        }
        log(
            "KafkaConsumer Error %(code)s at pid %(pid)s: topic=%(topic)s partition=[%(partition)s] "
            "reached end at offset %(offset)s\n",
            params=log_params,
            level="WARNING",
        )
        raise KafkaException(error)
    if not msg.value():
        # Empty payloads are dropped (implicit ``None`` in the original).
        return None
    if msg.headers() is None:
        msg.set_headers([])
    return msg
def decode_message(message: Message) -> DecodedMessage:
    """Convert a raw Kafka message into a ``DecodedMessage`` of UTF-8 strings.

    The key may be absent (``None``); the value is assumed present.  Header
    values are decoded individually, with falsy values mapped to ``None``.
    """
    raw_key = message.key()
    decoded_key = raw_key.decode("utf-8") if raw_key is not None else None
    decoded_value = message.value().decode("utf-8")

    raw_headers = message.headers() or []
    headers = [
        MessageHeader(key=hk, value=hv.decode("utf-8") if hv else None)
        for hk, hv in raw_headers
    ]

    return DecodedMessage(
        key=decoded_key,
        value=decoded_value,
        partition=message.partition(),
        offset=message.offset(),
        timestamp=str(message.timestamp()),
        headers=headers,
    )
def extract(self, carrier: Message) -> Optional[SpanContext]:
    """Pull a tracing ``SpanContext`` out of the Kafka message headers.

    Only header tuples whose key appears in ``self.target_keys`` are decoded
    and handed to the parent codec.  On any extraction failure the exception
    is logged and ``None`` is returned so the caller can start a new context.
    """
    header_dict = {}
    raw_headers = carrier.headers()
    if raw_headers:
        for entry in raw_headers:
            key = entry[KafkaCodec.KEY_POS]
            if key in self.target_keys:
                header_dict[key] = entry[KafkaCodec.VALUE_POS].decode(
                    KafkaCodec.ENCODING)
    try:
        return super().extract(header_dict)
    except Exception:
        self.__logger.exception(
            "Could not extract SpanContext. Using new one")
        return None