Example #1
    def encode_offset_commit_request(cls, client_id, correlation_id,
                                     group, payloads):
        """
        Encode some OffsetCommitRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are committing offsets for
            payloads: list of OffsetCommitRequest
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.OFFSET_COMMIT_KEY))
        message.append(write_short_string(group))
        message.append(struct.pack('>i', len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>iq', partition, payload.offset))
                message.append(write_short_string(payload.metadata))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
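
The helpers used above (write_short_string, _encode_message_header, group_by_topic_and_partition) are not defined on this page. A minimal, self-contained sketch of the framing, assuming write_short_string produces a Kafka "short string" (a signed 16-bit big-endian length followed by the raw bytes, with length -1 for None), shows how the last two lines of every encoder here prepend the 4-byte size the broker expects:

import struct

def write_short_string(s):
    # Assumed behaviour: Kafka "short string" encoding, i.e. a signed
    # 16-bit big-endian length followed by the bytes; -1 length for None.
    if s is None:
        return struct.pack('>h', -1)
    if not isinstance(s, bytes):
        s = s.encode('utf-8')
    return struct.pack('>h', len(s)) + s

msg = write_short_string('my-consumer-group')
# Prepend the 4-byte big-endian size, exactly as the encoders above do.
framed = struct.pack('>i%ds' % len(msg), len(msg), msg)
print(struct.unpack('>i', framed[:4])[0] == len(msg))  # True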
Example #2
    def encode_offset_fetch_request(cls, client_id, correlation_id, group,
                                    payloads):
        """
        Encode some OffsetFetchRequest structs

        Params
        ======
        client_id: string
        correlation_id: int
        group: string, the consumer group you are fetching offsets for
        payloads: list of OffsetFetchRequest
        """
        grouped_payloads = group_by_topic_and_partition(payloads)
        message = cls._encode_message_header(client_id, correlation_id,
                                             KafkaProtocol.OFFSET_FETCH_KEY)

        message += write_short_string(group)
        message += struct.pack('>i', len(grouped_payloads))

        for topic, topic_payloads in grouped_payloads.items():
            message += write_short_string(topic)
            message += struct.pack('>i', len(topic_payloads))

            for partition, payload in topic_payloads.items():
                message += struct.pack('>i', partition)

        return struct.pack('>i%ds' % len(message), len(message), message)
Example #3
    def encode_offset_fetch_request(cls, client_id, correlation_id,
                                    group, payloads, from_kafka=False):
        """
        Encode some OffsetFetchRequest structs. The request is encoded using
        version 0 if from_kafka is false, indicating a request for Zookeeper
        offsets. It is encoded using version 1 otherwise, indicating a request
        for Kafka offsets.

        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are fetching offsets for
            payloads: list of OffsetFetchRequest
            from_kafka: bool, default False, set True for Kafka-committed offsets
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        reqver = 1 if from_kafka else 0
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.OFFSET_FETCH_KEY,
                                                  version=reqver))

        message.append(write_short_string(group))
        message.append(struct.pack('>i', len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>i', partition))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
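
Only the request header differs between the two versions: the version keyword fills the ApiVersion field. _encode_message_header itself is not shown on this page; the following is a rough sketch under the assumption that it writes the standard Kafka request header, i.e. ApiKey (int16), ApiVersion (int16), CorrelationId (int32) and ClientId as a short string:

import struct

def _encode_message_header(client_id, correlation_id, request_key, version=0):
    # Assumed layout: the common Kafka request header. version=1
    # (from_kafka=True above) only changes the second int16 field.
    if not isinstance(client_id, bytes):
        client_id = client_id.encode('utf-8')
    return struct.pack('>hhih%ds' % len(client_id),
                       request_key, version, correlation_id,
                       len(client_id), client_id)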
Example #4
    def encode_offset_commit_request_kafka(cls, client_id, correlation_id,
                                           group, payloads):
        """
        Encode some OffsetCommitRequest structs
        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are committing offsets for
            payloads: list of OffsetCommitRequest
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(
            cls._encode_message_header(client_id,
                                       correlation_id,
                                       KafkaProtocol.OFFSET_COMMIT_KEY,
                                       version=2))
        message.append(write_short_string(group))
        message.append(struct.pack('>i', -1))  # ConsumerGroupGenerationId
        message.append(write_short_string(''))  # ConsumerId
        message.append(struct.pack('>q', -1))  # Retention time
        message.append(struct.pack('>i', len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>iq', partition, payload.offset))
                message.append(write_short_string(payload.metadata))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
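
Every encoder on this page iterates group_by_topic_and_partition(payloads) as a mapping of topic to a mapping of partition to payload. A sketch of that presumed behaviour, with a hypothetical namedtuple standing in for the OffsetCommitRequest struct (only the fields the encoder actually reads):

import collections

# Hypothetical stand-in for the OffsetCommitRequest struct; the encoders
# above only read .topic, .partition, .offset and .metadata.
OffsetCommitRequest = collections.namedtuple(
    'OffsetCommitRequest', ['topic', 'partition', 'offset', 'metadata'])

def group_by_topic_and_partition(payloads):
    # Presumed behaviour: nest the payloads as {topic: {partition: payload}},
    # which is how every loop above walks the result.
    grouped = collections.defaultdict(dict)
    for payload in payloads:
        grouped[payload.topic][payload.partition] = payload
    return grouped

payloads = [OffsetCommitRequest('events', 0, 42, ''),
            OffsetCommitRequest('events', 1, 17, '')]
grouped = group_by_topic_and_partition(payloads)
print(sorted(grouped['events']))  # [0, 1]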
Example #5
    def encode_offset_fetch_request(cls, client_id, correlation_id, group, payloads):
        """
        Encode some OffsetFetchRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are fetching offsets for
            payloads: list of OffsetFetchRequest
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id, KafkaProtocol.OFFSET_FETCH_KEY))

        message.append(write_short_string(group))
        message.append(struct.pack(">i", len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack(">i", len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack(">i", partition))

        msg = b"".join(message)
        return struct.pack(">i%ds" % len(msg), len(msg), msg)
Example #6
    def encode_offset_fetch_request(cls, client_id, correlation_id,
                                    group, payloads):
        """
        Encode some OffsetFetchRequest structs

        Params
        ======
        client_id: string
        correlation_id: int
        group: string, the consumer group you are fetching offsets for
        payloads: list of OffsetFetchRequest
        """
        grouped_payloads = group_by_topic_and_partition(payloads)
        message = cls._encode_message_header(client_id, correlation_id,
                                             KafkaProtocol.OFFSET_FETCH_KEY)

        message += write_short_string(group)
        message += struct.pack('>i', len(grouped_payloads))

        for topic, topic_payloads in grouped_payloads.items():
            message += write_short_string(topic)
            message += struct.pack('>i', len(topic_payloads))

            for partition, payload in topic_payloads.items():
                message += struct.pack('>i', partition)

        return struct.pack('>i%ds' % len(message), len(message), message)
Example #7
    def encode_fetch_request(cls, client_id, correlation_id, payloads=None,
                             max_wait_time=100, min_bytes=4096):
        """
        Encodes some FetchRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            payloads: list of FetchRequest
            max_wait_time: int, how long to block waiting on min_bytes of data
            min_bytes: int, the minimum number of bytes to accumulate before
                       returning the response
        """

        payloads = [] if payloads is None else payloads
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.FETCH_KEY))

        # -1 is the replica id
        message.append(struct.pack('>iiii', -1, max_wait_time, min_bytes,
                                   len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))
            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>iqi', partition, payload.offset,
                                           payload.max_bytes))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
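
The fixed-width part of the body can be checked in isolation: the four int32 fields packed with '>iiii' are the replica id (-1 for ordinary clients), max_wait_time, min_bytes and the topic count, and the outer struct.pack adds the 4-byte size prefix. A quick round trip, not taken from this page:

import struct

# replica id, max wait (ms), min bytes, topic count
body = struct.pack('>iiii', -1, 100, 4096, 1)
framed = struct.pack('>i%ds' % len(body), len(body), body)

size = struct.unpack('>i', framed[:4])[0]
fields = struct.unpack('>iiii', framed[4:4 + size])
print(size, fields)  # 16 (-1, 100, 4096, 1)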
Example #8
    def encode_offset_request(cls, client_id, correlation_id, payloads=[]):
        grouped_payloads = group_by_topic_and_partition(payloads)
        message = cls._encode_message_header(client_id, correlation_id, KafkaProtocol.OFFSET_KEY)
        message += struct.pack('>ii', -1, len(grouped_payloads))  # -1 is the replica id
        for topic, topic_payloads in grouped_payloads.items():
            message += write_short_string(topic)
            message += struct.pack('>i', len(topic_payloads))
            for partition, payload in topic_payloads.items():
                message += struct.pack('>iqi', partition, payload.time, payload.max_offsets)
        return struct.pack('>i%ds' % len(message), len(message), message)
Example #9
    def encode_offset_fetch_request(cls,
                                    client_id,
                                    correlation_id,
                                    group,
                                    payloads,
                                    from_kafka=False):
        """
        Encode some OffsetFetchRequest structs. The request is encoded using
        version 0 if from_kafka is false, indicating a request for Zookeeper
        offsets. It is encoded using version 1 otherwise, indicating a request
        for Kafka offsets.

        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are fetching offsets for
            payloads: list of OffsetFetchRequest
            from_kafka: bool, default False, set True for Kafka-committed offsets
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        reqver = 1 if from_kafka else 0
        message.append(
            cls._encode_message_header(client_id,
                                       correlation_id,
                                       KafkaProtocol.OFFSET_FETCH_KEY,
                                       version=reqver))

        message.append(write_short_string(group))
        message.append(struct.pack('>i', len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>i', partition))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
Example #10
    def encode_offset_request(cls, client_id, correlation_id, payloads=None):
        payloads = [] if payloads is None else payloads
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = cls._encode_message_header(client_id, correlation_id,
                                             KafkaProtocol.OFFSET_KEY)

        # -1 is the replica id
        message += struct.pack('>ii', -1, len(grouped_payloads))

        for topic, topic_payloads in grouped_payloads.items():
            message += write_short_string(topic)
            message += struct.pack('>i', len(topic_payloads))

            for partition, payload in topic_payloads.items():
                message += struct.pack('>iqi', partition, payload.time,
                                       payload.max_offsets)

        return struct.pack('>i%ds' % len(message), len(message), message)
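
For the offset (ListOffsets) API, payload.time is a timestamp in milliseconds with two special values, -1 for the latest offset and -2 for the earliest, and payload.max_offsets caps how many offsets the broker returns per partition. A hypothetical payload struct for illustration; the real OffsetRequest definition is not on this page:

import collections

# Hypothetical stand-in mirroring only the fields the encoder reads.
OffsetRequest = collections.namedtuple(
    'OffsetRequest', ['topic', 'partition', 'time', 'max_offsets'])

latest = OffsetRequest('events', 0, -1, 1)    # -1: latest available offset
earliest = OffsetRequest('events', 0, -2, 1)  # -2: earliest available offset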
Example #11
    def encode_offset_request(cls, client_id, correlation_id, payloads=None):
        payloads = [] if payloads is None else payloads
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id, KafkaProtocol.OFFSET_KEY))

        # -1 is the replica id
        message.append(struct.pack(">ii", -1, len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack(">i", len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack(">iqi", partition, payload.time, payload.max_offsets))

        msg = b"".join(message)
        return struct.pack(">i%ds" % len(msg), len(msg), msg)
Example #12
    def encode_fetch_request(cls, client_id, correlation_id, payloads=[], max_wait_time=100, min_bytes=4096):
        """
        Encodes some FetchRequest structs

        Params
        ======
        client_id: string
        correlation_id: int
        payloads: list of FetchRequest
        max_wait_time: int, how long to block waiting on min_bytes of data
        min_bytes: int, the minimum number of bytes to accumulate before returning the response
        """
        
        grouped_payloads = group_by_topic_and_partition(payloads)
        message = cls._encode_message_header(client_id, correlation_id, KafkaProtocol.FETCH_KEY)
        message += struct.pack('>iiii', -1, max_wait_time, min_bytes, len(grouped_payloads)) # -1 is the replica id
        for topic, topic_payloads in grouped_payloads.items():
            message += write_short_string(topic)
            message += struct.pack('>i', len(topic_payloads))
            for partition, payload in topic_payloads.items():
                message += struct.pack('>iqi', partition, payload.offset, payload.max_bytes)
        return struct.pack('>i%ds' % len(message), len(message), message)