def encode_offset_fetch_request(cls, client_id, correlation_id, group, payloads=None, from_kafka=False):
    """
    Encode some OffsetFetchRequest structs. The request is encoded using
    version 0 if from_kafka is false, indicating a request for Zookeeper
    offsets. It is encoded using version 1 otherwise, indicating a request
    for Kafka offsets.

    Arguments:
        client_id: string
        correlation_id: int
        group: string, the consumer group you are fetching offsets for
        payloads: list of OffsetFetchRequest, default None (treated as empty)
        from_kafka: bool, default False, set True for Kafka-committed offsets
    """
    # Accept payloads=None like the sibling encoders (encode_fetch_request,
    # encode_offset_request) do, so all encoders share the same signature style.
    payloads = [] if payloads is None else payloads
    grouped_payloads = group_by_topic_and_partition(payloads)

    message = []
    # API version 1 asks the broker for Kafka-committed offsets;
    # version 0 asks for Zookeeper-stored offsets.
    reqver = 1 if from_kafka else 0
    message.append(cls._encode_message_header(client_id, correlation_id,
                                              KafkaProtocol.OFFSET_FETCH_KEY,
                                              version=reqver))

    message.append(write_short_string(group))
    message.append(struct.pack('>i', len(grouped_payloads)))

    for topic, topic_payloads in grouped_payloads.items():
        message.append(write_short_string(topic))
        message.append(struct.pack('>i', len(topic_payloads)))

        for partition, payload in topic_payloads.items():
            message.append(struct.pack('>i', partition))

    # Standard Kafka framing: 4-byte big-endian size prefix, then the request.
    msg = b''.join(message)
    return struct.pack('>i%ds' % len(msg), len(msg), msg)
def encode_offset_commit_request(cls, client_id, correlation_id, group, payloads=None):
    """
    Encode some OffsetCommitRequest structs

    Arguments:
        client_id: string
        correlation_id: int
        group: string, the consumer group you are committing offsets for
        payloads: list of OffsetCommitRequest, default None (treated as empty)
    """
    # Accept payloads=None like the sibling encoders (encode_fetch_request,
    # encode_offset_request) do, so all encoders share the same signature style.
    payloads = [] if payloads is None else payloads
    grouped_payloads = group_by_topic_and_partition(payloads)

    message = []
    message.append(cls._encode_message_header(client_id, correlation_id,
                                              KafkaProtocol.OFFSET_COMMIT_KEY))

    message.append(write_short_string(group))
    message.append(struct.pack('>i', len(grouped_payloads)))

    for topic, topic_payloads in grouped_payloads.items():
        message.append(write_short_string(topic))
        message.append(struct.pack('>i', len(topic_payloads)))

        for partition, payload in topic_payloads.items():
            # Per partition: 4-byte partition id, 8-byte offset, then the
            # optional metadata string.
            message.append(struct.pack('>iq', partition, payload.offset))
            message.append(write_short_string(payload.metadata))

    # Standard Kafka framing: 4-byte big-endian size prefix, then the request.
    msg = b''.join(message)
    return struct.pack('>i%ds' % len(msg), len(msg), msg)
def encode_fetch_request(cls, client_id, correlation_id, payloads=None, max_wait_time=100, min_bytes=4096):
    """
    Encodes some FetchRequest structs

    Arguments:
        client_id: string
        correlation_id: int
        payloads: list of FetchRequest
        max_wait_time: int, how long to block waiting on min_bytes of data
        min_bytes: int, the minimum number of bytes to accumulate before
                   returning the response
    """
    if payloads is None:
        payloads = []
    by_topic = group_by_topic_and_partition(payloads)

    parts = [cls._encode_message_header(client_id, correlation_id,
                                        KafkaProtocol.FETCH_KEY)]

    # Replica id -1 identifies this as an ordinary consumer request.
    parts.append(struct.pack('>iiii', -1, max_wait_time, min_bytes,
                             len(by_topic)))

    for topic, partitions in by_topic.items():
        parts.append(write_short_string(topic))
        parts.append(struct.pack('>i', len(partitions)))
        for partition, payload in partitions.items():
            parts.append(struct.pack('>iqi', partition, payload.offset,
                                     payload.max_bytes))

    # Size-prefixed framing: 4-byte big-endian length followed by the body.
    body = b''.join(parts)
    return struct.pack('>i%ds' % len(body), len(body), body)
def encode_offset_request(cls, client_id, correlation_id, payloads=None):
    """
    Encode some OffsetRequest structs

    Arguments:
        client_id: string
        correlation_id: int
        payloads: list of OffsetRequest
    """
    if payloads is None:
        payloads = []
    by_topic = group_by_topic_and_partition(payloads)

    parts = [cls._encode_message_header(client_id, correlation_id,
                                        KafkaProtocol.OFFSET_KEY)]

    # Replica id -1 identifies this as an ordinary consumer request.
    parts.append(struct.pack('>ii', -1, len(by_topic)))

    for topic, partitions in by_topic.items():
        parts.append(write_short_string(topic))
        parts.append(struct.pack('>i', len(partitions)))
        for partition, payload in partitions.items():
            parts.append(struct.pack('>iqi', partition, payload.time,
                                     payload.max_offsets))

    # Size-prefixed framing: 4-byte big-endian length followed by the body.
    body = b''.join(parts)
    return struct.pack('>i%ds' % len(body), len(body), body)