def produce(self, **kwargs):
    """
    Sends message to kafka by encoding with specified avro schema.

    :param str topic: topic name
    :param object value: An object to serialize
    :param str value_schema: Avro schema for value
    :param object key: An object to serialize
    :param str key_schema: Avro schema for key
    :raises SerializerError: On serialization failure
    :raises ClientError: If no topic name was supplied
    """
    # Schemas passed per-call override the instance-level defaults.
    key_schema = kwargs.pop('key_schema', self._key_schema)
    value_schema = kwargs.pop('value_schema', self._value_schema)
    topic = kwargs.pop('topic', None)
    if not topic:
        raise ClientError("Topic name not specified.")
    value = kwargs.pop('value', None)
    key = kwargs.pop('key', None)

    # Compare against None explicitly: the original truthiness test
    # (`if value:`) silently skipped serialization for falsy-but-valid
    # payloads such as 0, '', {} or False, sending them through raw.
    if value is not None:
        if value_schema:
            value = self._serializer.encode_record_with_schema(
                topic, value_schema, value)
        else:
            raise ValueSerializerError("Avro schema required for values")

    if key is not None:
        if key_schema:
            # Trailing True flags key (as opposed to value) encoding.
            key = self._serializer.encode_record_with_schema(
                topic, key_schema, key, True)
        else:
            raise KeySerializerError("Avro schema required for key")

    super(AvroProducer, self).produce(topic, value, key, **kwargs)
def produce(self, **kwargs):
    """
    Asynchronously sends message to Kafka by encoding with specified or
    default avro schema.

    :param str topic: topic name
    :param object value: An object to serialize
    :param str value_schema: Avro schema for value
    :param str key: An optional string to use as the message key

    Plus any other parameters accepted by confluent_kafka.Producer.produce

    :raises SerializerError: On serialization failure
    :raises BufferError: If producer queue is full.
    :raises KafkaException: For other produce failures.
    """
    # A per-call value schema overrides the instance default.
    value_schema = kwargs.pop('value_schema', self._value_schema)
    topic = kwargs.pop('topic', None)
    if not topic:
        raise ClientError("Topic name not specified.")
    value = kwargs.pop('value', None)
    key = kwargs.pop('key', None)

    # Keys are forwarded unserialized, so only plain strings are valid.
    # Accept None as well: `key` defaults to None above, and the original
    # exact-type check (`type(key) != str`) made every key-less call
    # raise.  isinstance() also admits str subclasses.
    if key is not None and not isinstance(key, str):
        raise ValueSerializerError("Message key should be a str")

    # Compare against None so falsy-but-valid payloads (0, '', {})
    # are still Avro-serialized.
    if value is not None:
        if value_schema:
            value = self._serializer.encode_record_with_schema(
                topic, value_schema, value)
        else:
            raise ValueSerializerError("Avro schema required for values")

    self.default_producer.produce(topic, value, key)
def loads(schema_str):
    """
    Parse a schema given a schema string.

    :param str schema_str: Avro schema definition as a JSON string
    :returns: the parsed avro schema object
    :raises ClientError: if the schema cannot be parsed
    """
    try:
        # avro renamed parse() to Parse() in its Python 3 release.
        if sys.version_info[0] < 3:
            return schema.parse(schema_str)
        else:
            return schema.Parse(schema_str)
    except schema.SchemaParseException as e:
        # SchemaParseException lives directly on the avro schema module;
        # it is not an attribute of AvroException, so the original
        # `schema.AvroException.SchemaParseException` handler raised
        # AttributeError itself instead of catching the parse failure.
        raise ClientError("Schema parse failed: %s" % (str(e)))
def produce(self, **kwargs):
    """
    Asynchronously sends message to Kafka by encoding with specified or
    default avro schema.

    :param str topic: topic name
    :param object value: An object to serialize
    :param str value_schema: Avro schema for value
    :param object key: An object to serialize
    :param str key_schema: Avro schema for key

    Plus any other parameters accepted by confluent_kafka.Producer.produce

    :raises SerializerError: On serialization failure
    :raises BufferError: If producer queue is full.
    :raises KafkaException: For other produce failures.
    """
    # Per-call schemas take precedence over the instance-level defaults.
    key_schema = kwargs.pop('key_schema', self._key_schema)
    value_schema = kwargs.pop('value_schema', self._value_schema)

    topic = kwargs.pop('topic', None)
    if not topic:
        raise ClientError("Topic name not specified.")

    value = kwargs.pop('value', None)
    key = kwargs.pop('key', None)

    # Guard-clause form: demand a schema first, then serialize.
    if value is not None:
        if not value_schema:
            raise ValueSerializerError("Avro schema required for values")
        value = self._serializer.encode_record_with_schema(
            topic, value_schema, value)

    if key is not None:
        if not key_schema:
            raise KeySerializerError("Avro schema required for key")
        # Trailing True selects key (rather than value) encoding.
        key = self._serializer.encode_record_with_schema(
            topic, key_schema, key, True)

    log.debug(
        "Message for topic {} has been serialized. Sending".format(topic))
    super(AvroProducer, self).produce(topic, value, key, **kwargs)
def loads(schema_str):
    """
    Parse a schema given a schema string.

    :param str schema_str: Avro schema definition as a JSON string
    :returns: the parsed avro schema object
    :raises ClientError: if the schema cannot be parsed
    """
    try:
        parsed = schema.parse(schema_str)
    except SchemaParseException as e:
        raise ClientError("Schema parse failed: %s" % (str(e)))
    return parsed