def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput,
             settings=None, intermediate_stats=False, client_id="producer-performance",
             jmx_object_names=None, jmx_attributes=None):
    """Service wrapper around the producer performance tool.

    Args:
        context: standard test context
        num_nodes: number of worker nodes to run the tool on
        kafka: kafka service to produce to
        topic: produce to this topic
        num_records: total number of records to send
        record_size: size in bytes of each record
        throughput: target records/sec (throttle), as passed to the tool
        settings: extra producer config overrides (dict); None means no overrides
        intermediate_stats: if True, keep per-interval stats, not just the summary
        client_id: client.id used by the producer
        jmx_object_names: JMX MBeans to poll (None disables JMX collection)
        jmx_attributes: JMX attributes to collect from those MBeans
    """
    # Use None sentinels instead of mutable defaults ({} / []): a mutable
    # default is created once and shared across every call of this method.
    JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes or [])
    PerformanceService.__init__(self, context, num_nodes)
    self.kafka = kafka
    # Client-side security settings are derived from the kafka service's config.
    self.security_config = kafka.security_config.client_config()
    self.args = {
        'topic': topic,
        'kafka_opts': self.security_config.kafka_opts,
        'num_records': num_records,
        'record_size': record_size,
        'throughput': throughput
    }
    self.settings = settings or {}
    self.intermediate_stats = intermediate_stats
    self.client_id = client_id
def __init__(self, context, num_nodes, kafka, topic, security_protocol=None,
             new_consumer=None, message_validator=None, from_beginning=True,
             consumer_timeout_ms=None, client_id="console-consumer",
             jmx_object_names=None, jmx_attributes=None):
    """
    Args:
        context: standard context
        num_nodes: number of nodes to use (this should be 1)
        kafka: kafka service
        topic: consume from this topic
        security_protocol: security protocol for Kafka connections
        new_consumer: use new Kafka consumer if True
        message_validator: function which returns message or None
        from_beginning: consume from beginning if True, else from the end
        consumer_timeout_ms: corresponds to consumer.timeout.ms. consumer process ends if time between
            successively consumed messages exceeds this timeout. Setting this and waiting for the consumer
            to stop is a pretty good way to consume all messages in a topic.
        client_id: client.id used by the consumer
        jmx_object_names: JMX MBeans to poll (None disables JMX collection)
        jmx_attributes: JMX attributes to collect from those MBeans
    """
    # A mutable default ([]) would be shared across every instance of this
    # service; use a None sentinel and substitute a fresh list per call.
    JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes or [])
    PerformanceService.__init__(self, context, num_nodes)
    self.kafka = kafka
    self.new_consumer = new_consumer
    self.args = {
        'topic': topic,
    }
    self.consumer_timeout_ms = consumer_timeout_ms
    self.from_beginning = from_beginning
    self.message_validator = message_validator
    # One message list per node, keyed by 1-based node index.
    self.messages_consumed = {idx: [] for idx in range(1, num_nodes + 1)}
    self.client_id = client_id

    # Process client configuration
    self.prop_file = self.render('console_consumer.properties',
                                 consumer_timeout_ms=self.consumer_timeout_ms,
                                 client_id=self.client_id)

    # Add security properties to the config. If security protocol is not specified,
    # use the default in the template properties.
    self.security_config = SecurityConfig(security_protocol, self.prop_file)
    self.security_protocol = self.security_config.security_protocol
    # If the caller did not choose a consumer implementation, pick the new
    # consumer exactly when SSL is in use (the old consumer cannot do SSL).
    if self.new_consumer is None:
        self.new_consumer = self.security_protocol == SecurityConfig.SSL
    if self.security_protocol == SecurityConfig.SSL and not self.new_consumer:
        raise Exception("SSL protocol is supported only with the new consumer")
    self.prop_file += str(self.security_config)
def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput,
             version=TRUNK, settings=None, intermediate_stats=False,
             client_id="producer-performance", jmx_object_names=None, jmx_attributes=None):
    """Service wrapper around the producer performance tool, with version support.

    Args:
        context: standard test context
        num_nodes: number of worker nodes to run the tool on
        kafka: kafka service to produce to
        topic: produce to this topic
        num_records: total number of records to send
        record_size: size in bytes of each record
        throughput: target records/sec (throttle), as passed to the tool
        version: Kafka version to run on the worker nodes
        settings: extra producer config overrides (dict); None means no overrides
        intermediate_stats: if True, keep per-interval stats, not just the summary
        client_id: client.id used by the producer
        jmx_object_names: JMX MBeans to poll (None disables JMX collection)
        jmx_attributes: JMX attributes to collect from those MBeans
    """
    JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes or [])
    PerformanceService.__init__(self, context, num_nodes)

    # Only collect the JMX tool log when JMX collection was actually requested.
    collect_jmx = jmx_object_names is not None
    self.logs = {
        "producer_performance_stdout": {
            "path": ProducerPerformanceService.STDOUT_CAPTURE,
            "collect_default": True},
        "producer_performance_stderr": {
            "path": ProducerPerformanceService.STDERR_CAPTURE,
            "collect_default": True},
        "producer_performance_log": {
            "path": ProducerPerformanceService.LOG_FILE,
            "collect_default": True},
        "jmx_log": {
            "path": "/mnt/jmx_tool.log",
            "collect_default": collect_jmx},
    }

    self.kafka = kafka
    self.security_config = kafka.security_config.client_config()

    # Security (non-PLAINTEXT) requires a broker/client >= 0.9.0.0.
    protocol = self.security_config.security_protocol
    assert version >= V_0_9_0_0 or protocol == SecurityConfig.PLAINTEXT, \
        "Security protocol %s is only supported if version >= 0.9.0.0, version %s" % (self.security_config, str(version))

    self.args = {
        'topic': topic,
        'kafka_opts': self.security_config.kafka_opts,
        'num_records': num_records,
        'record_size': record_size,
        'throughput': throughput
    }
    self.settings = settings or {}
    self.intermediate_stats = intermediate_stats
    self.client_id = client_id

    # Pin every worker node to the requested Kafka version.
    for worker in self.nodes:
        worker.version = version
def clean_node(self, node):
    """Force-kill any leftover process and wipe this service's state from the node."""
    # A still-running process here means the test did not shut down cleanly.
    if self.alive(node):
        warning = "%s %s was still alive at cleanup time. Killing forcefully..." % (self.__class__.__name__, node.account)
        self.logger.warn(warning)

    # Parent cleanups first (they handle process teardown), then remove
    # on-disk state and security artifacts.
    JmxMixin.clean_node(self, node)
    PerformanceService.clean_node(self, node)
    node.account.ssh("rm -rf %s" % ConsoleConsumer.PERSISTENT_ROOT, allow_fail=False)
    self.security_config.clean_node(node)
def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput,
             version=TRUNK, settings=None, intermediate_stats=False,
             client_id="producer-performance", jmx_object_names=None, jmx_attributes=None):
    """Service wrapper around the producer performance tool, with version support.

    Args:
        context: standard test context
        num_nodes: number of worker nodes to run the tool on
        kafka: kafka service to produce to
        topic: produce to this topic
        num_records: total number of records to send
        record_size: size in bytes of each record
        throughput: target records/sec (throttle), as passed to the tool
        version: Kafka version to run on the worker nodes
        settings: extra producer config overrides (dict); None means no overrides
        intermediate_stats: if True, keep per-interval stats, not just the summary
        client_id: client.id used by the producer
        jmx_object_names: JMX MBeans to poll (None disables JMX collection)
        jmx_attributes: JMX attributes to collect from those MBeans
    """
    # Use None sentinels instead of mutable defaults ({} / []): a mutable
    # default is created once and shared across every call of this method.
    JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes or [])
    PerformanceService.__init__(self, context, num_nodes)
    self.logs = {
        "producer_performance_stdout": {
            "path": ProducerPerformanceService.STDOUT_CAPTURE,
            "collect_default": True},
        "producer_performance_stderr": {
            "path": ProducerPerformanceService.STDERR_CAPTURE,
            "collect_default": True},
        "producer_performance_log": {
            "path": ProducerPerformanceService.LOG_FILE,
            "collect_default": True},
        "jmx_log": {
            "path": "/mnt/jmx_tool.log",
            "collect_default": jmx_object_names is not None
        }
    }
    self.kafka = kafka
    self.security_config = kafka.security_config.client_config()
    security_protocol = self.security_config.security_protocol
    # Security (non-PLAINTEXT) requires a broker/client >= 0.9.0.0.
    assert version >= V_0_9_0_0 or security_protocol == SecurityConfig.PLAINTEXT, \
        "Security protocol %s is only supported if version >= 0.9.0.0, version %s" % (self.security_config, str(version))
    self.args = {
        'topic': topic,
        'kafka_opts': self.security_config.kafka_opts,
        'num_records': num_records,
        'record_size': record_size,
        'throughput': throughput
    }
    self.settings = settings or {}
    self.intermediate_stats = intermediate_stats
    self.client_id = client_id
    # Pin every worker node to the requested Kafka version.
    for node in self.nodes:
        node.version = version
def start_node(self, node):
    # Thin delegation: all start logic lives in PerformanceService; this
    # subclass adds nothing at startup time.
    PerformanceService.start_node(self, node)