def consume(self) -> Generator[Tuple[Optional[str], ProtobufMessage], None, None]:
    """Yield (key, message) pairs from the Kafka consumer indefinitely.

    Each record is deserialized with protoutils; if protobuf parsing raises
    ValueError, the raw value is decoded as UTF-8 JSON instead. A failure
    while processing a message is wrapped in BrokerException and treated as
    fatal (the connection is closed and the exception re-raised). Any other
    failure — presumably connection-related — triggers a consumer
    re-creation and consumption continues.

    Raises:
        BrokerException: if the consumer is None, a message cannot be
            processed, or the consumer is gone after a low-level failure.
    """
    if self.consumer is None:
        raise BrokerException('Cannot consume. Consumer is None')
    while True:
        try:
            for message in self.consumer:
                try:
                    logger.debug(
                        f"Message on {message.topic} received. Message: {message}"
                    )
                    try:
                        protobuf = protoutils.load_message(
                            message.topic, message.value)
                        protoutils.load_meta(protobuf, message)
                        yield message.key, protobuf
                    except ValueError:
                        # Fallback: try to parse json
                        yield message.key, json.loads(
                            message.value.decode('utf-8'))
                except Exception as e:
                    # Any per-message failure is promoted to BrokerException,
                    # which the outer handler treats as non-recoverable.
                    raise BrokerException(
                        f'Consuming message from topic {message.topic} failed. Message: {message}'
                    ) from e
        except BrokerException:
            # If something went wrong while parsing, do not try to recover
            # because something went wrong with protobuf
            try:
                self.close()
            except Exception:
                pass
            raise
        except Exception as e:
            # If it is something connection related, try to recover
            if self.consumer is None:
                raise BrokerException(
                    "Consuming message failed. Consumer is None") from e
            else:
                # Best-effort close, then rebuild the consumer and resume.
                try:
                    self.close()
                except Exception:
                    pass
                self.consumer = None
                self.create_kafka_consumer()
                logger.error(
                    "Consuming message failed. Consumer recreated.",
                    exc_info=True)
        finally:
            # Tiny pause each cycle to avoid a tight busy-loop.
            sleep(0.0001)
def close(self) -> None:
    """Close the connection of the producer.

    Logs a warning instead of raising when there is no producer to close;
    any error raised while closing is wrapped in BrokerException.

    Raises:
        BrokerException: if closing the producer fails.
    """
    try:
        if self.producer is None:
            # logger.warn is a deprecated alias (removed in Python 3.13);
            # use logger.warning instead.
            logger.warning("Couldn't close producer. Producer is None")
        else:
            self.producer.close()
    except Exception as e:
        raise BrokerException("Couldn't close producer") from e
def commit(self) -> None:
    """Commit the current consumer offset manually.

    The commit happens under self._commit_lock so concurrent callers
    serialize. Logs a warning instead of raising when the consumer is None.

    Raises:
        BrokerException: if the commit fails.
    """
    try:
        with self._commit_lock:
            if self.consumer is None:
                # Fixed message typo ("Could't") and the deprecated
                # logger.warn alias.
                logger.warning("Couldn't commit. Consumer is None")
            else:
                self.consumer.commit()
                # Only report success after an actual commit happened.
                logger.debug('Committed successfully')
    except Exception as e:
        raise BrokerException("Couldn't commit") from e
def send(self, protobuf: ProtobufMessage, key: Optional[str] = None) -> None:
    """Serialize *protobuf* and publish it on the topic derived from it.

    Args:
        protobuf: the message to enrich with metadata, serialize, and send.
        key: optional partition key for the record.

    Raises:
        BrokerException: if no producer is available.
    """
    if self.producer is None:
        raise BrokerException("Sending message failed. Producer is None")
    protoutils.add_meta(protobuf)
    payload = protoutils.serialize_message(protobuf)
    destination = topic.get_topic_from_protobuf(protobuf)
    logger.debug(f"Send message on {destination}. Key: {key}, Message: {protobuf}")
    self.producer.send(destination, value=payload, key=key)
def create_kafka_producer(self) -> None:
    """Create a KafkaProducer (SASL_SSL / PLAIN) and store it on self.producer.

    Connection settings (server, credentials, SSL context) come from the
    instance attributes set at construction time.

    Raises:
        BrokerException: if the client cannot be created.
    """
    try:
        self.producer = KafkaProducer(
            bootstrap_servers=self.kafka_server,
            sasl_mechanism='PLAIN',
            sasl_plain_password=self.kafka_password,
            sasl_plain_username=self.kafka_user,
            security_protocol='SASL_SSL',
            ssl_context=self.ssl_context,
            # Keys are sent as UTF-8 bytes; None keys pass through untouched.
            key_serializer=lambda x: None if x is None else x.encode('utf-8'))
        # Dropped the pointless f-string prefix (no placeholders).
        logger.info("Producer created")
    except Exception as e:
        raise BrokerException('Error while creating a producer') from e
def create_kafka_consumer(self) -> None:
    """Try creating a Kafka Consumer"""
    try:
        consumer = KafkaConsumer(
            auto_offset_reset=self.start_point,
            bootstrap_servers=self.kafka_server,
            enable_auto_commit=self.enable_auto_commit,
            group_id=self.group,
            sasl_mechanism='PLAIN',
            sasl_plain_password=self.kafka_password,
            sasl_plain_username=self.kafka_user,
            security_protocol='SASL_SSL',
            ssl_context=self.ssl_context,
            # Keys arrive as bytes; decode to str, leaving None untouched.
            key_deserializer=lambda k: None if k is None else k.decode('utf-8'))
        self.consumer = consumer
        # Subscribe either to an explicit topic list or to a pattern.
        if self.topics:
            consumer.subscribe(topics=self.topics)
            logger.info(f'Consumer created for topics: {self.topics}')
        else:
            consumer.subscribe(pattern=self.topic_pattern)
            logger.info(f'Consumer created for topics: {self.topic_pattern}')
    except Exception as e:
        raise BrokerException('Error while creating a consumer') from e
def flush(self) -> None:
    """Flush all buffered records through the producer.

    Raises:
        BrokerException: if there is no producer to flush.
    """
    producer = self.producer
    if producer is None:
        raise BrokerException("Flushing failed. Producer is None")
    producer.flush()
def instance(cls) -> GeneralProducer:
    """Return the initialized singleton instance.

    Raises:
        BrokerException: if the module has not been initialized yet.
    """
    inst = cls._instance
    if inst is not None:
        return inst
    raise BrokerException(
        f"Class instance not initialized. Call {cls.__name__}.init_module first"
    )