def test_produce_many_simple(simple_client, topic):
    """Test multiple produces using the SimpleClient
    """
    start_offset = current_offset(simple_client, topic, 0)

    assert_produce_request(
        simple_client,
        topic,
        [
            create_message(("Test message %d" % i).encode('utf-8'))
            for i in range(100)
        ],
        start_offset,
        100,
    )

    assert_produce_request(
        simple_client,
        topic,
        [
            create_message(("Test message %d" % i).encode('utf-8'))
            for i in range(100)
        ],
        start_offset + 100,
        100,
    )
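The current_offset and assert_produce_request helpers used above are test fixtures that are not shown here; a minimal sketch of what they might look like, assuming the legacy kafka-python payload API that SimpleClient speaks (names and exact import paths are assumptions inferred from how the test calls them):

# Sketch only: import locations follow kafka-python 1.x; adjust for other versions.
from kafka.structs import OffsetRequestPayload, ProduceRequestPayload


def current_offset(client, topic, partition):
    # Ask the broker for the latest offset of the partition (-1 = "latest", max 1 result).
    resp, = client.send_offset_request(
        [OffsetRequestPayload(topic, partition, -1, 1)])
    return resp.offsets[0]


def assert_produce_request(client, topic, messages, initial_offset, message_ct,
                           partition=0):
    # Produce the batch, then check both the assigned base offset and that the
    # log end offset advanced by exactly the number of messages sent.
    produce = ProduceRequestPayload(topic, partition, messages=messages)
    resp, = client.send_produce_request([produce])
    assert resp.error == 0
    assert resp.offset == initial_offset
    assert current_offset(client, topic, partition) == initial_offset + message_ct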
Example 2
    def test_produce_many_simple(self):
        start_offset = yield from self.current_offset(self.topic, 0)
        msgs1 = [create_message(("Test message %d" % i).encode('utf-8'))
                 for i in range(100)]

        yield from self.assert_produce_request(msgs1, start_offset, 100)

        msgs2 = [create_message(("Test message %d" % i).encode('utf-8'))
                 for i in range(100)]
        yield from self.assert_produce_request(msgs2, start_offset + 100, 100)
    def send_messages(self, partition, messages):
        messages = [ create_message(self.msg(str(msg))) for msg in messages ]
        produce = ProduceRequest(self.topic, partition, messages = messages)
        resp, = self.client.send_produce_request([produce])
        self.assertEqual(resp.error, 0)

        return [ x.value for x in messages ]
    def test_huge_messages(self):
        huge_message, = self.send_messages(0, [
            create_message(random_string(MAX_FETCH_BUFFER_SIZE_BYTES + 10)),
        ])

        # Create a consumer with the default buffer size
        consumer = self.consumer()

        # This consumer fails to get the message
        with self.assertRaises(ConsumerFetchSizeTooSmall):
            consumer.get_message(False, 0.1)

        consumer.stop()

        # Create a consumer with no fetch size limit
        big_consumer = self.consumer(
            max_buffer_size = None,
            partitions = [0],
        )

        # Seek to the last message
        big_consumer.seek(-1, 2)

        # Consume giant message successfully
        message = big_consumer.get_message(block=False, timeout=10)
        self.assertIsNotNone(message)
        self.assertEqual(message.message.value, huge_message)

        big_consumer.stop()
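self.consumer() here is another fixture; a plausible sketch, assuming it wraps kafka-python's SimpleConsumer. Passing max_buffer_size=None removes the fetch-size ceiling, which is what lets the oversized message through in the second half of the test:

    def consumer(self, **kwargs):
        # Hypothetical fixture: build a SimpleConsumer against the test topic.
        # With the default max_buffer_size, a message larger than the fetch buffer
        # raises ConsumerFetchSizeTooSmall; max_buffer_size=None lifts that cap.
        from kafka import SimpleConsumer
        return SimpleConsumer(self.client, "test-group", self.topic, **kwargs)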
Example 6
    def test_huge_messages(self):
        h_msg = create_message(random_string(MAX_FETCH_BUFFER_SIZE_BYTES + 10))
        (huge_message,) = yield from self.send_messages(0, [h_msg])

        # Create a consumer with the default buffer size
        consumer = yield from self.consumer_factory()

        # This consumer fails to get the message
        with self.assertRaises(ConsumerFetchSizeTooSmall):
            yield from consumer.get_messages(1)
        yield from consumer.stop()

        # Create a consumer with no fetch size limit
        big_consumer = yield from self.consumer_factory(
            max_buffer_size=None, partitions=[0])

        # Seek to the last message
        # TODO: fix or remove
        # yield from big_consumer.seek(-1, 2)

        # Consume giant message successfully
        (message,) = yield from big_consumer.get_messages(1)
        self.assertIsNotNone(message)
        self.assertEqual(message.message.value, huge_message)
        yield from big_consumer.stop()
    def test_huge_messages(self):
        huge_message, = self.send_messages(0, [
            create_message(random_string(MAX_FETCH_BUFFER_SIZE_BYTES + 10)),
        ])

        # Create a consumer with the default buffer size
        consumer = self.consumer()

        # This consumer fails to get the message
        with self.assertRaises(ConsumerFetchSizeTooSmall):
            consumer.get_message(False, 0.1)

        consumer.stop()

        # Create a consumer with no fetch size limit
        big_consumer = self.consumer(
            max_buffer_size = None,
            partitions = [0],
        )

        # Seek to the last message
        big_consumer.seek(-1, 2)

        # Consume giant message successfully
        message = big_consumer.get_message(block=False, timeout=10)
        self.assertIsNotNone(message)
        self.assertEqual(message.message.value, huge_message)

        big_consumer.stop()
    def send_messages(self, partition, messages):
        messages = [ create_message(self.msg(str(msg))) for msg in messages ]
        produce = ProduceRequestPayload(self.topic, partition, messages = messages)
        resp, = self.client.send_produce_request([produce])
        self.assertEqual(resp.error, 0)

        return [ x.value for x in messages ]
    def test_produce_many_simple(self):
        start_offset = self.current_offset(self.topic, 0)

        self.assert_produce_request(
            [create_message(("Test message %d" % i).encode('utf-8'))
             for i in range(100)],
            start_offset,
            100,
        )

        self.assert_produce_request(
            [create_message(("Test message %d" % i).encode('utf-8'))
             for i in range(100)],
            start_offset+100,
            100,
        )
Example 12
def send_messages(client, topic, partition, messages):
    """Send messages to a topic's partition
    """
    messages = [create_message(msg(str(m))) for m in messages]
    produce = ProduceRequestPayload(topic, partition, messages=messages)
    resp, = client.send_produce_request([produce])
    assert resp.error == 0

    return [x.value for x in messages]
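The msg() helper referenced above is part of the test scaffolding and is not shown; its job is just to turn each item into unique bytes. A usage sketch with a stand-in msg() (the broker address and topic name are placeholders):

import uuid

from kafka import SimpleClient


def msg(s):
    # Stand-in for the fixture helper: unique, bytes-encoded payloads.
    return ("%s-%s" % (s, uuid.uuid4().hex)).encode('utf-8')


client = SimpleClient('localhost:9092')   # assumes a broker on localhost
sent_values = send_messages(client, 'my-topic', 0, range(10))
# sent_values holds the exact bytes that were written, convenient for later
# asserting that a consumer read them back unchanged.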
Example 15
def _prepare(envelope_and_message):
    try:
        kwargs = {}
        if envelope_and_message.message.keys:
            kwargs['key'] = envelope_and_message.message.encoded_keys
        return create_message(
            envelope_and_message.envelope.pack(envelope_and_message.message),
            **kwargs)
    except:
        logger.exception('Prepare failed')
        raise
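The key kwarg forwarded here ends up as the message's partition key; a minimal sketch of the two shapes _prepare can return, assuming the legacy create_message(payload, key=None) signature:

keyed = create_message(b'packed-envelope-bytes', key=b'partition-key')
plain = create_message(b'packed-envelope-bytes')
# Both are plain Message namedtuples; only the keyed variant carries a key,
# which the producer/partitioner can later use to pick a partition.
assert keyed.key == b'partition-key'
assert plain.key is None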
Example 17
    def test_produce_mixed(self):
        start_offset = yield from self.current_offset(self.topic, 0)

        msg_count = 1 + 100 + 100
        messages = [
            create_message(b"Just a plain message"),
            create_gzip_message([("Gzipped %d" % i).encode('utf-8')
                                 for i in range(100)]),
            create_snappy_message([b"Snappy " + bytes(i) for i in range(100)])
        ]

        yield from self.assert_produce_request(messages, start_offset,
                                               msg_count)
Example 19
    def test_produce_mixed(self):
        start_offset = self.current_offset(self.topic, 0)

        msg_count = 1+100
        messages = [
            create_message(b"Just a plain message"),
            create_gzip_message([
                (("Gzipped %d" % i).encode('utf-8'), None) for i in range(100)]),
        ]

        # All snappy integration tests fail with nosnappyjava
        if False and has_snappy():
            msg_count += 100
            messages.append(create_snappy_message([("Snappy %d" % i, None) for i in range(100)]))

        self.assert_produce_request(messages, start_offset, msg_count)
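The msg_count arithmetic works because a gzip (or snappy) wrapper is produced as a single message whose inner set still occupies one offset per record once the broker unrolls it; a sketch of the same batch bookkeeping, reusing the helpers these tests already import:

plain = create_message(b"Just a plain message")                  # advances the log by 1
gzipped = create_gzip_message([
    (("Gzipped %d" % i).encode('utf-8'), None) for i in range(100)
])                                                               # advances the log by 100
expected_offset_delta = 1 + 100                                  # matches msg_count above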
Example 20
    def test_create_from_offset_and_message_with_no_reader_schema_specified(
            self, registered_schema, payload, example_payload_data):
        unpacked_message = CreateMessage(
            schema_id=registered_schema.schema_id,
            payload=payload,
            timestamp=1500,
        )
        offset_and_message = OffsetAndMessage(
            0, create_message(Envelope().pack(unpacked_message)))

        extracted_message = create_from_offset_and_message(
            offset_and_message=offset_and_message, reader_schema_id=None)
        assert extracted_message.schema_id == registered_schema.schema_id
        assert extracted_message.topic == registered_schema.topic.name
        assert extracted_message.reader_schema_id == registered_schema.schema_id
        assert extracted_message.payload_data == example_payload_data
    def produce_messages(self):
        """
        Produce sample messages
        """
        # TODO: Support different kafka port
        kafka = KafkaClient(self.config.kafka_host)

        total_messages = self.batches * self.batch_size
        messages_batch = [create_message(random.choice(self.sample_messages)) for r in range(self.batch_size)]

        for i in range(self.batches):
            req = ProduceRequest(topic=self.config.kafka_topic, partition=0, messages=messages_batch)
            resps = kafka.send_produce_request(payloads=[req], fail_on_error=True)
            sent_messages = (i + 1) * self.batch_size
            logging.info('Created {} out of {} sample messages'.format(sent_messages, total_messages))
        kafka.close()
Example 23
def low_level():
    '''low level'''
    from kafka import KafkaClient, create_message
    from kafka.protocol import KafkaProtocol
    from kafka.common import ProduceRequest

    kafka = KafkaClient(KAFKA_SERVER)

    req = ProduceRequest(topic=b'topic1', partition=1,
                         messages=[create_message(b'some message')])
    resps = kafka.send_produce_request(payloads=[req], fail_on_error=True)
    kafka.close()

    print(resps[0].topic)      # b'topic1'
    print(resps[0].partition)  # 1
    print(resps[0].error)      # 0 (hopefully)
    print(resps[0].offset)     # offset of the first message sent in this request
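For contrast with the low-level request above, the same send can also go through the legacy high-level wrapper; a minimal sketch, assuming the old SimpleProducer API that shipped alongside create_message (removed in kafka-python 2.0) and reusing the same KAFKA_SERVER constant:

def high_level():
    '''high level counterpart (sketch)'''
    from kafka import KafkaClient, SimpleProducer

    kafka = KafkaClient(KAFKA_SERVER)
    producer = SimpleProducer(kafka)

    # SimpleProducer picks the partition itself; payloads must be bytes.
    resps = producer.send_messages(b'topic1', b'some message')
    print(resps[0].offset)   # offset assigned to the message

    kafka.close()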
    def produce_messages(self):
        """
        Produce sample messages
        """
        # TODO: Support different kafka port
        kafka = KafkaClient(self.config.kafka_host)

        total_messages = self.batches * self.batch_size
        messages_batch = [
            create_message(random.choice(self.sample_messages))
            for _ in range(self.batch_size)
        ]

        for i in range(self.batches):
            # TODO: Support writing to all partitions
            req = ProduceRequest(topic=self.config.kafka_topic,
                                 partition=0,
                                 messages=messages_batch)
            kafka.send_produce_request(payloads=[req], fail_on_error=True)
            sent_messages = (i + 1) * self.batch_size
            logging.info('Created %s out of %s sample messages', sent_messages,
                         total_messages)
        kafka.close()
def _create_random_messages(messages, count):
    return [
        kafka.create_message(random.choice(messages)) for _ in range(count)
    ]
Example 27
    def prepare_message(self, message):
        return create_message(Envelope().pack(message)).value
Example 29
    def offset_and_message(self, message):
        return OffsetAndMessage(0, create_message(Envelope().pack(message)))