def get_client_properties(self):
    """Provision SCRAM credentials for the client user and return client SASL properties.

    Side effects:
      - Creates/updates SCRAM-SHA-256 and SCRAM-SHA-512 credentials for
        ``self.client_username`` via ``Kafka.alterUser``.
      - If the expected JAAS entry is not already present in Ambari's
        ``kafka_jaas_conf`` content, patches it in, pushes the config back to
        Ambari, restarts services with stale configs, and sleeps 20s to let
        the restart settle.

    Returns:
        dict with ``sasl.jaas.config`` (the JAAS entry flattened to one line)
        and ``sasl.mechanism`` (``SCRAM-SHA-256``).

    NOTE(review): block structure below is reconstructed from a collapsed
    source line — confirm the setConfig/restart/sleep trio is intended to run
    only when the JAAS content actually changes.
    """
    from beaver.component.kafka import Kafka
    from beaver.component.ambari import Ambari
    # Register SCRAM credentials for the client user with both mechanisms.
    Kafka.alterUser(
        userName=self.client_username,
        config="'SCRAM-SHA-256=[iterations=8192,password=%s],"
        "SCRAM-SHA-512=[password=%s]'" % (self.client_password, self.client_password)
    )
    # Fetch the current kafka_jaas_conf from Ambari (URL depends on whether
    # Ambari is encrypted/SSL-enabled).
    is_ambari_enc = Ambari.is_ambari_encrypted()
    kafka_jaas_config = Ambari.getConfig(
        "kafka_jaas_conf",
        webURL=Ambari.getWebUrl(is_hdp=False, is_enc=is_ambari_enc)
    )
    replacement_jaas_entry = self.jaas_entry
    if not Kafka._isSecure:
        # On unsecured clusters there is no KafkaServer section yet, so wrap
        # the entry in one before splicing it into the JAAS file.
        replacement_jaas_entry = "\nKafkaServer {%s\n};" % self.jaas_entry
    if self.to_be_replaced + replacement_jaas_entry not in kafka_jaas_config['content']:
        # Entry not present yet: splice it in right after the anchor text,
        # push the new config, and restart affected services.
        print "old : %s" % kafka_jaas_config['content']
        kafka_jaas_config['content'] = kafka_jaas_config['content'].replace(
            self.to_be_replaced, self.to_be_replaced + replacement_jaas_entry
        )
        print "new : %s" % kafka_jaas_config['content']
        Ambari.setConfig(
            "kafka_jaas_conf",
            kafka_jaas_config,
            webURL=Ambari.getWebUrl(is_hdp=False, is_enc=is_ambari_enc)
        )
        Ambari.restart_services_with_stale_configs()
        # Give the restarted services time to come back up.
        time.sleep(20)
    # JAAS config must be a single line when passed as a client property.
    return {'sasl.jaas.config': self.jaas_entry.replace("\n", " "), 'sasl.mechanism': 'SCRAM-SHA-256'}
def publish_message_successfully(cls, topic_name, message):
    """Produce *message* to *topic_name* and assert the producer exited cleanly.

    Returns:
        The console producer's exit code (0 on success).
    """
    brokers = Kafka.get_broker_list()
    exit_code, _stdout = Kafka.runConsoleProducer(topic_name, brokerlist=brokers, message=message)
    failure_msg = "Kafka Producer for topic: %s message: %s" % (topic_name, message)
    cls.kru_assert(exit_code == 0, failure_msg)
    return exit_code
def create_topic_successfully(cls, topic_name, replication=1):
    """Create *topic_name* as the Kafka admin user, assert success, then open permissions.

    :param topic_name: topic to create
    :param replication: replication factor (default 1)
    """
    admin_user = Kafka.getkafkaAdminUser()
    exit_code, _stdout = Kafka.createTopic(
        topic_name, replication=replication, runallowuser=False, user=admin_user
    )
    cls.kru_assert(exit_code == 0, "Kafka topic - %s failed at creation" % topic_name)
    # Grant all ACLs so non-admin test users can produce/consume.
    Kafka.grantAllPermissionOnTopic(topic_name)
def run(self, messages=None, topic="defaultTopic"):
    """Produce each message to *topic*, asserting every producer run succeeds.

    :param messages: messages to publish; defaults to ["hello world"] when
        empty/None.
    :param topic: topic to publish to.

    Bug fix: the original defaulted the *messages* parameter but then iterated
    ``self.messages`` and produced to ``self.topic``, leaving both parameters
    dead. The parameters are now actually used, matching the default-handling
    logic and the assertion message (which already referenced ``topic``).
    """
    if not messages:
        messages = ["hello world"]
    for message in messages:
        exit_code, _stdout = Kafka.runConsoleProducer(
            topic, brokerlist=Kafka.get_broker_list(), message=message)
        assert exit_code == 0, "Kafka producer for %s failed" % (topic)
def _data_loss_test_post_ru(cls):
    """Verify the pre-upgrade message on ``cls.TOPIC_NAME`` survived the upgrade.

    Consumes one message from the beginning of the topic and asserts the
    recorded message (``cls.msg``) appears in the consumer output.
    """
    consumer_args = "--from-beginning --max-messages 1"
    _exit_code, stdout = Kafka.runConsoleConsumer(cls.TOPIC_NAME, consumer_args)
    failure_msg = "The messages produced: %s was not found in stdout: %s" % (cls.msg, stdout)
    cls.kru_assert(cls.msg in stdout, failure_msg)
def verifyLongRunningJob(cls):
    '''
    Validate long running background job after end of all component upgrade.

    Steps:
      1. Re-run the post-upgrade data-loss and broker.id checks.
      2. Stop the background producer thread and wait for it to finish.
      3. Assert more than 5 messages were published during the upgrade.
      4. Consume everything published and assert each message survived.

    Fix: the count-assertion message claimed "at least 5" while the condition
    tests ``> 5`` (i.e. more than 5); the message now matches the condition.
    '''
    cls._data_loss_test_post_ru()
    cls._broker_id_test_post_ru()
    # Signal the background producer to stop, then wait for it.
    cls._ru_stop_thread_event.set()
    cls._background_thread.join()
    cls.kru_assert(
        len(cls._ru_messages) > 5,
        "Expected more than 5 messages publish to topic %s found %s" % (
            cls.RU_TOPIC_NAME, str(cls._ru_messages)))
    exit_code, stdout = Kafka.runConsoleConsumer(
        cls.RU_TOPIC_NAME, "--from-beginning --max-messages " + str(len(cls._ru_messages)))
    print_stdout_flag = False
    # the stdout can be huge - we can't print it in a loop or make it part of message
    for msg in cls._ru_messages:
        cls.kru_assert(
            msg in stdout,
            "The messages produced: %s was not found in stdout." % msg)
        if msg not in stdout:
            print_stdout_flag = True
    if print_stdout_flag:
        # Dump the full consumer output once, only when something was missing.
        logger.info("stdout is: " + stdout)
        logger.info("cls._ru_messages is: " + str(cls._ru_messages))
    logger.info("kafka brokers have come up properly after upgrade.")
def run_smoke_test(cls, config=None):
    '''
    Run smoke test for kafka: create a PID-unique topic, produce one message,
    then consume it back and verify it round-tripped.

    :param config: unused; kept for smoke-test interface compatibility.
    '''
    from beaver.component.kafka import Kafka
    topic = "kafka_ru_smoke-%s" % os.getpid()
    smoke_msg = "This is Kafka Smoke TEST for RU."
    # Create the topic as the admin user and open up ACLs for test users.
    exit_code, _stdout = Kafka.createTopic(
        topic, runallowuser=False, user=Kafka.getkafkaAdminUser())
    cls.kru_assert(exit_code == 0, "Kafka topic - %s failed at creation" % topic)
    Kafka.grantAllPermissionOnTopic(topic)
    # Produce a single message.
    exit_code, _stdout = Kafka.runConsoleProducer(
        topic, brokerlist=Kafka.get_broker_list(), message=smoke_msg)
    cls.kru_assert(
        exit_code == 0,
        "Kafka Producer for topic: %s message: %s" % (topic, smoke_msg))
    # Consume it back from the beginning and verify the payload.
    _exit_code, stdout = Kafka.runConsoleConsumer(topic, "--from-beginning --max-messages 1")
    cls.kru_assert(
        smoke_msg in stdout,
        "The messages produced: %s was not found in stdout: %s" % (smoke_msg, stdout))
    logger.info("kafka brokers have come up properly after upgrade.")
def Kafka_getServiceLogDir(cls, logoutput=True):
    """Return the Kafka service log directory.

    Best-effort: if the Kafka component cannot be imported/queried, falls back
    to the default location ``/var/log/kafka`` instead of raising.

    :param logoutput: when True, log the exception before falling back.
    :returns: path to the Kafka service log directory.

    Fix: corrected the typo "occured" -> "occurred" in the error log message.
    """
    try:
        from beaver.component.kafka import Kafka
        return Kafka.getServiceLogDir()
    except Exception:
        # Deliberate broad catch: this is a best-effort lookup and any failure
        # (import error, remote call failure) should yield the default path.
        if logoutput:
            logger.error(
                "Exception occurred during Kafka_getServiceLogDir() call")
            logger.error(traceback.format_exc())
        kafka_log_default_dir = "/var/log/kafka"
        return kafka_log_default_dir
def _broker_id_test_pre_ru(cls):
    """Snapshot the broker.id mapping from all brokers before the upgrade.

    The mapping is merged into ``cls.broker_id_mapping`` so the post-upgrade
    check can verify broker ids did not change.
    """
    cls.broker_id_mapping.update(Kafka.get_broker_id_from_all_brokers())
def _broker_id_test_post_ru(cls):
    """Assert the broker.id mapping is unchanged after the upgrade.

    Compares the current mapping from all brokers against the pre-upgrade
    snapshot stored in ``cls.broker_id_mapping``.
    """
    current_mapping = Kafka.get_broker_id_from_all_brokers()
    mismatch_msg = "broker.id mapping has changed expected %s found %s" % (
        cls.broker_id_mapping, current_mapping)
    cls.kru_assert(cls.broker_id_mapping == current_mapping, mismatch_msg)