async def _process_message(self, msg: Message):
    """De-serialize a Kafka message and execute the configured service.

    On success the result is logged; on failure the raw payload is
    forwarded to the failed-messages topic together with the error text.

    :param msg: Kafka message.
    :type msg: confluent_kafka.Message
    """
    # BUG FIX: topic/partition/offset are methods on confluent_kafka.Message
    # and must be called — the original logged bound-method reprs (the rest
    # of this function already calls msg.offset()).
    LOGGER.info(
        'Processing Message(topic={}, partition={}, offset={}) ...'.format(
            msg.topic(), msg.partition(), msg.offset()))
    service_repr = get_call_repr(self._service)
    LOGGER.info('Executing job {}'.format(service_repr))
    try:
        message_value = _decode_msg_value(msg.value())
        res = await self._exec_service(message_value)
    except KeyboardInterrupt:
        LOGGER.error('Job was interrupted: {}'.format(msg.offset()))
    except Exception as err:
        LOGGER.exception('Job {} raised an exception: {}'.format(
            msg.offset(), err))
        # Forward the failed payload so it can be analysed / replayed later.
        await self._producer.produce(topic=self._failed_topic,
                                     value=msg.value(),
                                     error=str(err))
    else:
        LOGGER.info('Job {} returned: {}'.format(msg.offset(), res))
def decode_message_from_avro(self, message: Message):
    """Decode an Avro-encoded Kafka message into its constituent parts.

    Returns a 4-tuple ``(key_schema_id, value_schema_id, decoded_message,
    serializable_message)`` where ``serializable_message`` is a plain dict
    suitable for dumping to disk.
    """
    key_schema_id, decoded_key = self.decode_bytes(message.key())
    value_schema_id, decoded_value = self.decode_bytes(message.value())
    headers = []
    if message.headers():
        for header_key, header_value in message.headers():
            headers.append(
                MessageHeader(
                    key=header_key,
                    value=header_value.decode("utf-8") if header_value else None,
                ))
    decoded_message = DecodedAvroMessage(
        decoded_key,
        decoded_value,
        message.partition(),
        key_schema_id,
        value_schema_id,
        headers=headers,
    )
    # NOTE(review): the original created a fresh local itertools.count(1)
    # here, so next() always yielded 1 and the version prefix was always
    # "0001". That behaviour is kept, but made explicit instead of hiding
    # behind a counter that can never advance. If a per-instance running
    # version was intended (as in write_message_to_file), the counter must
    # live on self.
    serializable_message = {
        "key": decoded_message.key,
        "value": decoded_message.value,
        "partition": decoded_message.partition,
        "schema_directory_name": f"0001_{key_schema_id}_{value_schema_id}",
    }
    return key_schema_id, value_schema_id, decoded_message, serializable_message
def _process_message(self, msg: KafkaMessage):
    """Validate a raw consumer record before handing it on.

    Returns ``None`` for partition-EOF events, raises ``KafkaException``
    for any other broker-reported error, and otherwise returns the
    message with its headers normalised to a list.
    """
    error = msg.error()
    if error:
        if error.code() == KafkaError._PARTITION_EOF:
            # Reaching the end of a partition is not a failure.
            return None
        monitoring.got_counter("kafka_consumer_exception")
        log_params = {
            "code": error.code(),
            "pid": os.getpid(),
            "topic": msg.topic(),
            "partition": msg.partition(),
            "offset": msg.offset(),
            log_const.KEY_NAME: log_const.EXCEPTION_VALUE,
        }
        log(
            "KafkaConsumer Error %(code)s at pid %(pid)s: topic=%(topic)s "
            "partition=[%(partition)s] reached end at offset %(offset)s\n",
            params=log_params,
            level="WARNING",
        )
        raise KafkaException(error)
    # Non-empty payloads get an empty header list instead of None.
    if msg.value() and msg.headers() is None:
        msg.set_headers([])
    return msg
async def _process_message(self, msg: Message):
    """De-serialize a Kafka message and execute the service with retries.

    The retry schedule is a list of ``(count, timeout_sec)`` tuples, each
    attempted via ``self._retry_exec``. Any accumulated errors are
    published to the failed-messages topic for later analysis.

    :param msg: Kafka message.
    :type msg: confluent_kafka.Message
    """
    # BUG FIX: topic/partition/offset are methods and must be called —
    # the original logged bound-method reprs instead of their values.
    LOGGER.info(
        'Processing Message(topic={}, partition={}, offset={}) ...'.format(
            msg.topic(), msg.partition(), msg.offset()))
    errors = []
    service_repr = get_call_repr(self._service)
    # TODO get schedule from args
    # First element in tuple is count, the second is timeout in sec.
    schedule = [(3, 5), (2, 60), (2, 60 * 5), (1, 60 * 60),
                (3, 60 * 60 * 3)]
    LOGGER.info('Executing job {}'.format(service_repr))
    try:
        message_value = _decode_msg_value(msg.value())
        for count, timeout in schedule:
            res, exec_error = await self._retry_exec(
                count, timeout, message_value)
            if res:
                # BUG FIX: the original referenced self.service, which this
                # class does not define (the service is stored as
                # self._service, as used above) — the success path would
                # raise AttributeError.
                LOGGER.info(
                    'Successful re-processing of the message {} by the service {}'
                    .format(message_value, self._service))
                break
            else:
                errors.extend(exec_error)
        if len(errors) > 0:
            await self._producer.produce(topic=self._failed_topic,
                                         value=msg.value(),
                                         error=str(errors))
    except KeyboardInterrupt:
        LOGGER.error('Job was interrupted: {}'.format(msg.offset()))
    except Exception as err:
        LOGGER.exception('Job {} raised an exception: {}'.format(
            msg.offset(), err))
        errors.append(err)
        # For handle analyse and process message
        await self._producer.produce(topic=self._failed_topic,
                                     value=msg.value(),
                                     error=str(errors))
    else:
        LOGGER.info('Job {} finished'.format(msg.offset()))
def delivery_success_callback(msg: Message):
    """Pretty-print the delivery metadata of a successfully produced message."""
    details = {
        'topic': msg.topic(),
        'partition': msg.partition(),
        'timestamp': msg.timestamp(),
        'key': msg.key(),
        'value': msg.value(),
    }
    pprint(details)
def update_callback(self, err: Optional[cimpl.KafkaError], msg: cimpl.Message):
    """Record the delivery result of a produced message on this instance.

    Fails fast if the broker reported an error; otherwise copies the raw
    key/value bytes, partition, offset and timestamp onto ``self``.
    """
    assert err is None, f"Received KafkaError {err}."
    self.binary_key = msg.key()
    self.binary_value = msg.value()
    self.partition = msg.partition()
    self.offset = msg.offset()
    # timestamp() appears to return a pair — presumably (type, value) as in
    # confluent_kafka; only the second element is kept. TODO confirm.
    self.timestamp = msg.timestamp()[1]
def _message_produced(self, error: KafkaError, message: Message) -> None:
    """Delivery-report callback: log failures, emit a bus event on success.

    :param error: delivery error, or None when delivery succeeded.
    :param message: the produced Kafka message.
    """
    topic = message.topic()
    raw_value = message.value()
    # ROBUSTNESS FIX: value() can be None (e.g. tombstone records); the
    # original called .decode() unconditionally and would raise
    # AttributeError inside the delivery callback before reporting anything.
    msg = raw_value.decode(Charset.UTF_8.value) if raw_value is not None else None
    if error is not None:
        print(f"Failed to deliver message: {msg}: {error.str()}")
    else:
        self.bus.emit(MSG_PROD_EVT, message=msg, topic=topic)
def _deserializer_msg(self, msg: Message):
    """Apply the configured value/key deserializers to ``msg`` in place.

    Wraps any deserializer failure in ValueDeserializationError or
    KeyDeserializationError (carrying the original exception and the
    message) and returns the mutated message on success.
    """
    value_deserializer = self._value_deserializer
    if value_deserializer:
        try:
            msg.set_value(value_deserializer(msg.value()))
        except Exception as exc:
            raise ValueDeserializationError(exception=exc, message=msg)
    key_deserializer = self._key_deserializer
    if key_deserializer:
        try:
            msg.set_key(key_deserializer(msg.key()))
        except Exception as exc:
            raise KeyDeserializationError(exception=exc, message=msg)
    return msg
def decode_message(message: Message) -> DecodedMessage:
    """Decode a Kafka message's key/value bytes into UTF-8 strings.

    Handles absent (None) keys and values symmetrically and decodes any
    headers that are present.
    """
    key_bytes = message.key()
    decoded_key = key_bytes.decode("utf-8") if key_bytes is not None else None
    # ROBUSTNESS FIX: the original decoded value() unconditionally and
    # raised AttributeError on tombstone messages (value is None), even
    # though the key already had a None guard.
    value_bytes = message.value()
    decoded_value = value_bytes.decode("utf-8") if value_bytes is not None else None
    headers = []
    if message.headers():
        for header_key, header_value in message.headers():
            headers.append(
                MessageHeader(
                    key=header_key,
                    value=header_value.decode("utf-8") if header_value else None,
                ))
    return DecodedMessage(
        key=decoded_key,
        value=decoded_value,
        partition=message.partition(),
        offset=message.offset(),
        timestamp=str(message.timestamp()),
        headers=headers,
    )
def write_message_to_file(self, message: Message):
    """Decode an Avro message and append it to ``self.file`` via pickle.

    When the (key, value) schema pair changes — or on the very first
    message, when no schema directory exists yet — a new schema directory
    name is generated from a running per-instance counter and the
    schemata are dumped.

    :param message: Kafka message whose key and value are Avro-encoded.
    """
    key_schema_id, decoded_key = self.decode_bytes(message.key())
    value_schema_id, decoded_value = self.decode_bytes(message.value())
    decoded_message = DecodedAvroMessage(decoded_key, decoded_value,
                                         message.partition(),
                                         key_schema_id, value_schema_id)
    # Start a new schema directory whenever the schema changed or none has
    # been created yet; remember the current ids for the next comparison.
    if self.schema_changed(decoded_message) or self.schema_dir_name is None:
        # e.g. "0003_12_17": zero-padded running version + schema ids.
        self.schema_dir_name = f"{next(self.schema_version):04}_{key_schema_id}_{value_schema_id}"
        self.current_key_schema_id = key_schema_id
        self.current_value_schema_id = value_schema_id
        self._dump_schemata(key_schema_id, value_schema_id)
    # Only plain types go into the pickle stream, not the message object.
    serializable_message = {
        "key": decoded_message.key,
        "value": decoded_message.value,
        "partition": decoded_message.partition,
        "schema_directory_name": self.schema_dir_name,
    }
    pickle.dump(serializable_message, self.file)
def decode_message(message: Message) -> DecodedMessage:
    """Decode a Kafka message's key and value bytes as UTF-8 strings.

    ROBUSTNESS FIX: the original called .decode() unconditionally and
    raised AttributeError when the key or value was None (e.g. keyless or
    tombstone messages); both are now None-guarded, consistent with the
    header-aware decode_message variant elsewhere in this file.
    """
    key_bytes = message.key()
    value_bytes = message.value()
    decoded_key = key_bytes.decode("utf-8") if key_bytes is not None else None
    decoded_value = value_bytes.decode("utf-8") if value_bytes is not None else None
    return DecodedMessage(decoded_key, decoded_value, message.partition())
def on_message_received(self, msg: Message) -> None:
    """Buffer the raw (key, value) pair and reset the idle counter."""
    # noinspection PyArgumentList
    record = (msg.key(), msg.value())
    self.message_buffer.append(record)
    self.__idle_count = 0