Example #1
    def encode_offset_fetch_request(cls, client_id, correlation_id,
                                    group, payloads, from_kafka=False):
        """
        Encode some OffsetFetchRequest structs. The request is encoded using
        version 0 if from_kafka is false, indicating a request for Zookeeper
        offsets. It is encoded using version 1 otherwise, indicating a request
        for Kafka offsets.

        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are fetching offsets for
            payloads: list of OffsetFetchRequest
            from_kafka: bool, default False, set True for Kafka-committed offsets
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        reqver = 1 if from_kafka else 0
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.OFFSET_FETCH_KEY,
                                                  version=reqver))

        message.append(write_short_string(group))
        message.append(struct.pack('>i', len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>i', partition))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
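
All of the encoders above lean on two helpers that are not shown here: group_by_topic_and_partition and write_short_string. The following is a minimal sketch of what they presumably do, inferred only from how they are used in these examples (two nested .items() loops, and the inline '>h%ds' topic packing in the produce example); it is not the library's actual implementation.

    import struct
    from collections import defaultdict

    def group_by_topic_and_partition(payloads):
        # Nest payloads as {topic: {partition: payload}}, the shape the
        # encoders iterate over with two .items() loops.
        out = defaultdict(dict)
        for p in payloads:
            out[p.topic][p.partition] = p
        return out

    def write_short_string(s):
        # A Kafka "short string": an int16 length prefix followed by the raw
        # bytes; a null string is presumably encoded as length -1.
        if s is None:
            return struct.pack('>h', -1)
        if not isinstance(s, bytes):
            s = s.encode('utf-8')
        return struct.pack('>h%ds' % len(s), len(s), s)
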
Example #2
    def encode_offset_commit_request(cls, client_id, correlation_id,
                                     group, payloads):
        """
        Encode some OffsetCommitRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            group: string, the consumer group you are committing offsets for
            payloads: list of OffsetCommitRequest
        """
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.OFFSET_COMMIT_KEY))
        message.append(write_short_string(group))
        message.append(struct.pack('>i', len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>iq', partition, payload.offset))
                message.append(write_short_string(payload.metadata))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
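
A hedged usage sketch for the commit encoder: the payload shape below is hypothetical, inferred from the attributes the code reads (topic and partition for grouping, plus .offset and .metadata per partition), and it assumes the encoders are exposed as classmethods on KafkaProtocol, as the cls parameter suggests.

    from collections import namedtuple

    # Hypothetical payload shape, inferred from the fields the encoder reads.
    OffsetCommitRequest = namedtuple('OffsetCommitRequest',
                                     ['topic', 'partition', 'offset', 'metadata'])

    payloads = [
        OffsetCommitRequest(b'my-topic', 0, 42, b''),
        OffsetCommitRequest(b'my-topic', 1, 17, b''),
    ]
    request = KafkaProtocol.encode_offset_commit_request(
        b'my-client', correlation_id=1, group=b'my-group', payloads=payloads)
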
Example #3
    def encode_fetch_request(cls, client_id, correlation_id, payloads=None,
                             max_wait_time=100, min_bytes=4096):
        """
        Encodes some FetchRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            payloads: list of FetchRequest
            max_wait_time: int, how long (in ms) to block waiting on min_bytes of data
            min_bytes: int, the minimum number of bytes to accumulate before
                       returning the response
        """

        payloads = [] if payloads is None else payloads
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.FETCH_KEY))

        # -1 is the replica id
        message.append(struct.pack('>iiii', -1, max_wait_time, min_bytes,
                                   len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))
            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>iqi', partition, payload.offset,
                                           payload.max_bytes))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
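
Every encoder ends by size-prefixing the body: a 4-byte big-endian length followed by that many bytes. For orientation, here is a generic sketch of how a client could read such a frame back off a socket; this is an assumption-level illustration of the framing, not the library's connection code.

    import struct

    def read_framed_response(sock):
        # Read a frame shaped like the encoders' output: int32 size, then body.
        def read_exact(n):
            buf = b''
            while len(buf) < n:
                chunk = sock.recv(n - len(buf))
                if not chunk:
                    raise EOFError('socket closed mid-frame')
                buf += chunk
            return buf

        (size,) = struct.unpack('>i', read_exact(4))
        return read_exact(size)
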
Example #4
    def encode_produce_request(cls,
                               client_id,
                               correlation_id,
                               payloads=None,
                               acks=1,
                               timeout=1000):
        """
        Encode some ProduceRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            payloads: list of ProduceRequest
            acks: how many acknowledgements the request should wait for
                0: immediate response, no acknowledgement
                1: written to disk by the leader
                2+: waits for this many replicas to sync
                -1: waits for all replicas to be in sync
            timeout: maximum time (in ms) the server will wait for acks from
                replicas. This is _not_ a socket timeout

        """
        payloads = [] if payloads is None else payloads
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(
            cls._encode_message_header(client_id, correlation_id,
                                       KafkaProtocol.PRODUCE_KEY))

        message.append(
            struct.pack('>hii', acks, timeout, len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(
                struct.pack('>h%dsi' % len(topic), len(topic), topic,
                            len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                msg_set = KafkaProtocol._encode_message_set(payload.messages)
                message.append(
                    struct.pack('>ii%ds' % len(msg_set), partition,
                                len(msg_set), msg_set))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
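
For orientation, here is a minimal sketch of the byte layout this encoder produces after the message header (api key, version, correlation id, and client id are omitted, and an opaque stand-in blob replaces the output of _encode_message_set, whose format is not shown in these examples).

    import struct

    # Body layout: int16 acks, int32 timeout, int32 topic_count, then per topic:
    #   int16 topic_len + topic bytes, int32 partition_count, then per partition:
    #     int32 partition, int32 message_set_size, message_set bytes.
    topic = b'my-topic'
    msg_set = b'\x00' * 10          # stand-in for _encode_message_set output
    body = struct.pack('>hii', 1, 1000, 1)
    body += struct.pack('>h%dsi' % len(topic), len(topic), topic, 1)
    body += struct.pack('>ii%ds' % len(msg_set), 0, len(msg_set), msg_set)

    # The final frame prepends the total size, exactly as the encoder does.
    frame = struct.pack('>i%ds' % len(body), len(body), body)
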
Example #5
    def encode_offset_request(cls, client_id, correlation_id, payloads=None):
        """
        Encode some OffsetRequest structs

        Arguments:
            client_id: string
            correlation_id: int
            payloads: list of OffsetRequest
        """
        payloads = [] if payloads is None else payloads
        grouped_payloads = group_by_topic_and_partition(payloads)

        message = []
        message.append(cls._encode_message_header(client_id, correlation_id,
                                                  KafkaProtocol.OFFSET_KEY))

        # -1 is the replica id
        message.append(struct.pack('>ii', -1, len(grouped_payloads)))

        for topic, topic_payloads in grouped_payloads.items():
            message.append(write_short_string(topic))
            message.append(struct.pack('>i', len(topic_payloads)))

            for partition, payload in topic_payloads.items():
                message.append(struct.pack('>iqi', partition, payload.time,
                                           payload.max_offsets))

        msg = b''.join(message)
        return struct.pack('>i%ds' % len(msg), len(msg), msg)
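
A hedged usage sketch for the offset encoder: the payload shape is hypothetical, inferred from the attributes the code reads (topic and partition for grouping, plus .time and .max_offsets), and it again assumes the encoder is a classmethod on KafkaProtocol.

    from collections import namedtuple

    # Hypothetical payload shape, inferred from the fields the encoder reads.
    OffsetRequest = namedtuple('OffsetRequest',
                               ['topic', 'partition', 'time', 'max_offsets'])

    # In the Kafka protocol, time=-1 asks for the latest offset and time=-2
    # for the earliest; max_offsets caps how many offsets come back.
    payloads = [OffsetRequest(b'my-topic', 0, -1, 1)]
    request = KafkaProtocol.encode_offset_request(
        b'my-client', correlation_id=3, payloads=payloads)
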