def run(cls, args, cluster_config):
    """Rewind the group's consumer offsets for the requested topics.

    Exits with a TypeError (after printing a usage hint) when the
    topic/partition input is badly formatted.
    """
    # Set up the Kafka client and refresh topic metadata first.
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        args.groupid,
        args.topic,
        args.partitions,
        cluster_config,
        kafka_client,
        force=args.force,
    )
    try:
        rewind_consumer_offsets(
            kafka_client,
            args.groupid,
            topics_dict,
            args.storage,
        )
    except TypeError:
        # Malformed input surfaces as a TypeError from the helper.
        print(
            "Error: Badly formatted input, please re-run command "
            "with --help option.",
            file=sys.stderr,
        )
        raise
    kafka_client.close()
def fetch_offsets(group, topics):
    """Return the current consumer offsets for *group* on *topics*."""
    # Build a client from the cluster configuration.
    cluster = get_cluster_config()
    kafka_client = KafkaToolClient(cluster.broker_list)
    current = get_current_consumer_offsets(kafka_client, group, topics, False)
    kafka_client.close()
    return current
def fetch_offsets(group, topics):
    """Return the current consumer offsets for *group* on *topics*.

    The Kafka client is now closed in a ``finally`` block, so a failure
    inside ``get_current_consumer_offsets`` no longer leaks the client
    connection (the original only closed on the success path).
    """
    # Setup the Kafka client
    config = get_cluster_config()
    client = KafkaToolClient(config.broker_list)
    try:
        return get_current_consumer_offsets(client, group, topics, False)
    finally:
        client.close()
def run(cls, args, cluster_config):
    """Advance the group's consumer offsets for the requested topics.

    Re-raises TypeError on badly formatted input and UnknownMemberIdError
    when the consumer group still has active members; both cases print a
    human-readable explanation first.
    """
    # Setup the Kafka client and refresh topic metadata.
    client = KafkaToolClient(cluster_config.broker_list)
    client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        args.groupid,
        args.topic,
        args.partitions,
        cluster_config,
        client,
        force=args.force,
        use_admin_client=args.use_admin_client,
    )
    try:
        advance_consumer_offsets(
            client,
            args.groupid,
            topics_dict,
        )
    except TypeError:
        # BUG FIX: the message halves were passed to print() as two
        # positional arguments ("...command ", "with --help option."),
        # which printed a stray double space. Concatenate them instead,
        # matching the sibling commands.
        print(
            "Error: Badly formatted input, please re-run command "
            "with --help option.",
            file=sys.stderr,
        )
        raise
    except UnknownMemberIdError:
        print(
            "Unable to advance offsets for group '{group_name}' from topic "
            "'{topic_name}'. You must ensure none of the consumers with this "
            "consumer group id are running before trying to advance the "
            "offsets stored in Kafka for this consumer group. Try stopping "
            "all of your consumers.".format(
                group_name=args.groupid,
                topic_name=args.topic,
            ),
        )
        raise
    client.close()
def run(cls, args, cluster_config):
    """Print consumer offset metadata for the requested group/topics."""
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        args.groupid,
        args.topic,
        args.partitions,
        cluster_config,
        kafka_client,
    )
    offsets_md = cls.get_offsets(
        kafka_client,
        args.groupid,
        topics_dict,
        args.storage,
    )
    # Warn for every requested topic Kafka returned no metadata for.
    for topic in topics_dict:
        if topic in offsets_md:
            continue
        print(
            "Warning: Topic {topic} or one or more of it's partitions "
            "do not exist in Kafka".format(topic=topic),
            file=sys.stderr,
        )
    kafka_client.close()
    if args.json:
        print_json(offsets_md)
    else:
        cls.print_output(offsets_md, args.watermark)
def commit_offsets(offsets, group):
    """Commit *offsets* for consumer *group* to the cluster."""
    # Build a client from the cluster configuration.
    cluster = get_cluster_config()
    kafka_client = KafkaToolClient(cluster.broker_list)
    set_consumer_offsets(kafka_client, group, offsets)
    kafka_client.close()
def commit_offsets(offsets, group):
    """Commit *offsets* for consumer *group* to the cluster.

    The Kafka client is now closed in a ``finally`` block, so a failing
    commit no longer leaks the client connection (the original only
    closed on the success path).
    """
    # Setup the Kafka client
    config = get_cluster_config()
    client = KafkaToolClient(config.broker_list)
    try:
        set_consumer_offsets(
            client,
            group,
            offsets,
        )
    finally:
        client.close()
def run(cls, args, cluster_config):
    """Report per-partition offset distance (lag) for a consumer group.

    Output is JSON (one record per partition, annotated with absolute
    and percentage distance) or formatted text, optionally sorted.
    """
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        groupid=args.groupid,
        topic=args.topic,
        partitions=args.partitions,
        cluster_config=cluster_config,
        client=kafka_client,
        quiet=args.json,
        storage=args.storage,
    )
    offsets_md = cls.get_offsets(
        kafka_client,
        args.groupid,
        topics_dict,
        args.storage,
    )
    kafka_client.close()
    # Apply the requested ordering, if any.
    if args.sort_by_distance:
        offsets_md = cls.sort_by_distance(offsets_md)
    elif args.sort_by_distance_percentage:
        offsets_md = cls.sort_by_distance_percentage(offsets_md)
    if args.json:
        rows = []
        for plist in offsets_md.values():
            for part in plist:
                row = part._asdict()
                row['offset_distance'] = row['highmark'] - row['current']
                row['percentage_distance'] = cls.percentage_distance(
                    row['highmark'],
                    row['current'],
                )
                rows.append(row)
        print_json(rows)
    else:
        # Warn for topics (or partitions) Kafka has no metadata for.
        for topic in topics_dict:
            if topic not in offsets_md:
                print(
                    "Warning: Topic {topic} or one or more of it's partitions "
                    "do not exist in Kafka".format(topic=topic),
                    file=sys.stderr,
                )
        cls.print_output(offsets_md, args.watermark)
def run(cls, args, cluster_config):
    """Report per-partition offset distance (lag) for a consumer group."""
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        groupid=args.groupid,
        topic=args.topic,
        partitions=args.partitions,
        cluster_config=cluster_config,
        client=kafka_client,
        quiet=args.json,
    )
    offsets_md = cls.get_offsets(kafka_client, args.groupid, topics_dict)
    kafka_client.close()
    # Apply the requested ordering, if any.
    if args.sort_by_distance:
        offsets_md = cls.sort_by_distance(offsets_md)
    elif args.sort_by_distance_percentage:
        offsets_md = cls.sort_by_distance_percentage(offsets_md)
    if args.json:
        # Flatten partitions and annotate each with its lag metrics.
        rows = []
        for plist in offsets_md.values():
            for part in plist:
                row = part._asdict()
                row['offset_distance'] = row['highmark'] - row['current']
                row['percentage_distance'] = cls.percentage_distance(
                    row['highmark'],
                    row['current'],
                )
                rows.append(row)
        print_json(rows)
    else:
        # Warn for topics (or partitions) Kafka has no metadata for.
        for topic in topics_dict:
            if topic not in offsets_md:
                print(
                    "Warning: Topic {topic} or one or more of it's partitions "
                    "do not exist in Kafka".format(topic=topic),
                    file=sys.stderr,
                )
        cls.print_output(offsets_md, args.watermark)
def run(cls, args, cluster_config):
    """Save the group's current consumer offsets to a JSON file.

    Re-raises KafkaUnavailableError (after printing an error) when the
    cluster cannot be reached.
    """
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        groupid=args.groupid,
        topic=args.topic,
        partitions=args.partitions,
        cluster_config=cluster_config,
        client=kafka_client,
        storage=args.storage,
    )
    try:
        offsets_md = get_consumer_offsets_metadata(
            kafka_client,
            args.groupid,
            topics_dict,
            offset_storage=args.storage,
        )
    except KafkaUnavailableError:
        print(
            "Error: Encountered error with Kafka, please try again later.",
            file=sys.stderr,
        )
        raise
    # Warn about requested topics that Kafka knows nothing about.
    for topic in topics_dict:
        if topic not in offsets_md:
            print(
                "Warning: Topic {topic} does not exist in Kafka".format(
                    topic=topic,
                ),
                file=sys.stderr,
            )
    cls.save_offsets(offsets_md, topics_dict, args.json_file, args.groupid)
    kafka_client.close()
def run(cls, args, cluster_config):
    """Commit restored offsets (cls.new_offsets_dict) for a consumer group.

    Exits with status 1 if any per-partition commit fails.
    """
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    # Unless forced, confirm the consumer group exists before committing.
    if not args.force:
        cls.get_topics_from_consumer_group_id(
            cluster_config,
            args.groupid,
            storage=args.storage,
        )
    try:
        failures = set_consumer_offsets(
            kafka_client,
            args.groupid,
            cls.new_offsets_dict,
            offset_storage=args.storage,
        )
    except TypeError:
        print(
            "Error: Badly formatted input, please re-run command "
            "with --help option.",
            file=sys.stderr,
        )
        raise
    kafka_client.close()
    if failures:
        parts = ["Error: Unable to commit consumer offsets for:\n"]
        parts.extend(
            " Topic: {topic} Partition: {partition} Error: {error}\n".format(
                topic=result.topic,
                partition=result.partition,
                error=result.error,
            )
            for result in failures
        )
        print("".join(parts), file=sys.stderr)
        sys.exit(1)
def run(cls, args, cluster_config):
    """Commit restored offsets (cls.new_offsets_dict) for a consumer group.

    Exits with status 1 if any per-partition commit fails.
    """
    client = KafkaToolClient(cluster_config.broker_list)
    client.load_metadata_for_topics()
    if not args.force:
        # Verify the consumer group exists in Zookeeper before committing.
        cls.get_topics_from_consumer_group_id(cluster_config, args.groupid)
    try:
        results = set_consumer_offsets(
            client,
            args.groupid,
            cls.new_offsets_dict,
            offset_storage=args.storage,
        )
    except TypeError:
        print(
            "Error: Badly formatted input, please re-run command "
            "with --help option.",
            file=sys.stderr,
        )
        raise
    client.close()
    if not results:
        return
    message = "Error: Unable to commit consumer offsets for:\n"
    for res in results:
        message += (
            " Topic: {topic} Partition: {partition} Error: {error}\n".format(
                topic=res.topic,
                partition=res.partition,
                error=res.error,
            )
        )
    print(message, file=sys.stderr)
    sys.exit(1)
def run(cls, args, cluster_config):
    """Save the group's current consumer offsets to a JSON file."""
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        groupid=args.groupid,
        topic=args.topic,
        partitions=args.partitions,
        cluster_config=cluster_config,
        client=kafka_client,
    )
    try:
        offsets_md = get_consumer_offsets_metadata(
            kafka_client,
            args.groupid,
            topics_dict,
        )
    except KafkaUnavailableError:
        print(
            "Error: Encountered error with Kafka, please try again later.",
            file=sys.stderr,
        )
        raise
    # Warn about requested topics that Kafka knows nothing about.
    for topic in topics_dict:
        if topic in offsets_md:
            continue
        print(
            "Warning: Topic {topic} does not exist in Kafka".format(
                topic=topic,
            ),
            file=sys.stderr,
        )
    cls.save_offsets(offsets_md, topics_dict, args.json_file, args.groupid)
    kafka_client.close()
def run(cls, args, cluster_config):
    """Display current consumer offsets, as JSON or formatted text."""
    kafka_client = KafkaToolClient(cluster_config.broker_list)
    kafka_client.load_metadata_for_topics()
    topics_dict = cls.preprocess_args(
        args.groupid,
        args.topic,
        args.partitions,
        cluster_config,
        kafka_client,
        quiet=args.json,
    )
    offsets_md = cls.get_offsets(
        kafka_client,
        args.groupid,
        topics_dict,
        args.storage,
    )
    kafka_client.close()
    if args.json:
        # Flatten all partition records into a single JSON list.
        flattened = []
        for plist in offsets_md.values():
            flattened.extend(part._asdict() for part in plist)
        print_json(flattened)
    else:
        # Warn for topics (or partitions) Kafka has no metadata for.
        for topic in topics_dict:
            if topic not in offsets_md:
                print(
                    "Warning: Topic {topic} or one or more of it's partitions "
                    "do not exist in Kafka".format(topic=topic),
                    file=sys.stderr,
                )
        cls.print_output(offsets_md, args.watermark)
def run(cls, args, cluster_config):
    """Fetch and display topic watermarks, as JSON or formatted text.

    Cleanup: the original initialized ``watermarks = {}`` (immediately
    overwritten in both branches — dead code) and duplicated the
    ``get_watermarks`` call in an if/else that differed only in the
    ``exact`` literal. Collapsed into a single call.
    """
    # Setup the Kafka client and refresh topic metadata.
    client = KafkaToolClient(cluster_config.broker_list)
    client.load_metadata_for_topics()
    # bool() mirrors the original's True/False literals for any
    # truthy/falsy args.exact value.
    watermarks = cls.get_watermarks(
        client,
        args.topic,
        exact=bool(args.exact),
    )
    client.close()
    if args.json:
        print_json(watermarks)
    else:
        cls.print_output(watermarks)