    async def add_message(
        self, tp, key, value, timeout, timestamp_ms=None,
        headers=[]
    ):
        """ Add message to batch by topic-partition
        If batch is already full this method waits (`timeout` seconds maximum)
        until batch is drained by send task
        """
        if self._closed:
            # this can happen when the producer is closing but a task
            # still tries to send messages
            raise ProducerClosed()
        if self._exception is not None:
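            # raise a fresh copy so each caller gets its own exception
            # instance instead of re-raising the shared stored one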
            raise copy.copy(self._exception)

        pending_batches = self._batches.get(tp)
        if not pending_batches:
            builder = self.create_builder()
            batch = self._append_batch(builder, tp)
        else:
            batch = pending_batches[-1]

        future = batch.append(key, value, timestamp_ms, headers=headers)
        if future is None:
            # Batch is full, can't append data at the moment;
            # wait until the batch for this topic-partition is drained
            start = self._loop.time()
            await batch.wait_drain(timeout)
            timeout -= self._loop.time() - start
            if timeout <= 0:
                raise KafkaTimeoutError()
            return (await self.add_message(
                tp, key, value, timeout, timestamp_ms, headers=headers))
        return future

    async def add_batch(self, builder, tp, timeout):
        """Add BatchBuilder to queue by topic-partition.

        Arguments:
            builder (BatchBuilder): batch object to enqueue.
            tp (TopicPartition): topic and partition to enqueue this batch for.
            timeout (int): time in seconds to wait for a free slot in the batch
                queue.

        Returns:
            MessageBatch: delivery wrapper around the BatchBuilder object.

        Raises:
            aiokafka.errors.ProducerClosed: the accumulator has already been
                closed and flushed.
            aiokafka.errors.KafkaTimeoutError: the batch could not be added
                within the specified timeout.
        """
        if self._closed:
            raise ProducerClosed()
        if self._exception is not None:
            raise copy.copy(self._exception)

        start = self._loop.time()
        while timeout > 0:
            pending = self._batches.get(tp)
            if pending:
                await pending[-1].wait_drain(timeout=timeout)
                # subtract only this iteration's wait so the remaining
                # budget is not double-counted across loop iterations
                now = self._loop.time()
                timeout -= now - start
                start = now
            else:
                batch = self._append_batch(builder, tp)
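                # shield the delivery future so that cancelling the caller
                # does not cancel the batch that is already queued for send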
                return asyncio.shield(batch.future, loop=self._loop)
        raise KafkaTimeoutError()
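
A minimal usage sketch for the snippet above, assuming an already-configured accumulator instance with a running sender task draining it; the topic name, partition, payload bytes, 30-second timeout, and the `_send_one` helper are illustrative assumptions, not aiokafka API:

from aiokafka.structs import TopicPartition

async def _send_one(accumulator):
    tp = TopicPartition("my-topic", 0)
    # add_message returns a future tied to the batch that accepted the
    # record; it resolves once that batch has been delivered or failed.
    fut = await accumulator.add_message(
        tp, key=b"key", value=b"value", timeout=30)
    return await fut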
Example #3
    @asyncio.coroutine
    def add_message(self, tp, key, value, timeout):
        """ Add message to batch by topic-partition
        If batch is already full this method waits (`ttl` seconds maximum)
        until batch is drained by send task
        """
        if self._closed:
            # this can happen when the producer is closing but a task
            # still tries to send messages
            raise ProducerClosed()

        batch = self._batches.get(tp)
        if not batch:
            batch = MessageBatch(tp, self._batch_size, self._compression_type,
                                 self._batch_ttl, self._api_version,
                                 self._loop)
            self._batches[tp] = batch

            if not self._wait_data_future.done():
                # Wakeup sender task if it waits for data
                self._wait_data_future.set_result(None)

        future = batch.append(key, value)
        if future is None:
            # Batch is full, can't append data at the moment;
            # wait until the batch for this topic-partition is drained
            start = self._loop.time()
            yield from asyncio.wait([batch.wait_drain()],
                                    timeout=timeout,
                                    loop=self._loop)
            timeout -= self._loop.time() - start
            if timeout <= 0:
                raise KafkaTimeoutError()
            return (yield from self.add_message(tp, key, value, timeout))
        return future
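
This older variant predates async/await: it is a generator-based coroutine (hence the `yield from` calls and the `@asyncio.coroutine` decorator), so a caller on pre-3.5 asyncio drives it the same way. A hypothetical call site mirroring the sketch above; names and values are assumptions, not aiokafka API:

import asyncio
from aiokafka.structs import TopicPartition

@asyncio.coroutine
def _send_one(accumulator):
    tp = TopicPartition("my-topic", 0)
    fut = yield from accumulator.add_message(tp, b"key", b"value", timeout=30)
    # the future resolves once the batch holding the record is delivered
    result = yield from fut
    return result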