def create_sub_tmp_file(self, system_name="", filename="", delete="yes"):
    """Create a temp file for the parallel execution test.

    Resolves the file name from the datafile (<filename> tag of
    system_name) when only a system is given, writes a marker string into
    that file under the "parallel_exec_tmp_dir" path from the data
    repository, sleeps 10s, then optionally deletes the file.

    :Arguments:
        1. system_name(str) : system whose <filename> tag supplies the name
        2. filename(str) : explicit file name (used as-is when provided)
        3. delete(str) : "yes" to remove the file after creation
    :Returns:
        1. status(bool) : True on success; False if deletion was requested
           but failed
    """
    path = data_Utils.get_object_from_datarepository(
        "parallel_exec_tmp_dir")
    if system_name != "" and filename == "":
        filename = data_Utils.getSystemData(self.datafile, system_name,
                                            "filename")
    elif system_name == "" and filename == "":
        pNote("No system or filename found, needs to provide at least one",
              "error")
    # context manager guarantees the handle is closed even if write raises
    with open(os.path.join(path, filename), "w") as tmp_file:
        tmp_file.write("This is a test string")
    # fixed delay so parallel peers can observe the file before deletion
    time.sleep(10)
    status = False
    if delete == "yes":
        try:
            file_Utils.delFile(os.path.join(path, filename))
            status = True
        except OSError:
            pNote(
                "Cannot remove tmp file, no write access to {}".format(
                    path), "error")
    else:
        status = True
    return status
def check_lab_equipment(self, system_name):
    """Validate lab PC replacement or test set calibration status.

    Resolves system_name into its subsystems (if any) and, per subsystem,
    dispatches on the <eqpt_type> tag: "lab_pc" calls pc_replacement,
    "lab_testset" calls testset_calibration. Any other value is an error;
    a missing eqpt_type logs a warning. Overall status is the AND of all
    per-subsystem results.

    :Arguments:
        1. system_name(str) : system (optionally with subsystems) from the
           datafile
    :Returns:
        1. status(bool) : True only if every checked subsystem passed
        2. output_dict(dict) : data returned by the last checking keyword
    """
    wdesc = "Check lab PC replacement or test set calibration status"
    pNote(wdesc)
    # Resolve system_name and subsystem_list
    system_name, subsystem_list = \
        Utils.data_Utils.resolve_system_subsystem_list(self.datafile,
                                                       system_name)
    output_dict = {}
    status = True
    # use identity comparison for None (PEP 8) instead of ==/!=
    attempt = 1 if subsystem_list is None else len(subsystem_list)
    for i in range(attempt):
        result = False
        subsystem_name = subsystem_list[i] if subsystem_list is not None \
            else None
        # Put system_name in system_name[subsystem] format before calling
        # checking function
        call_system_name = system_name if subsystem_name is None \
            else "{0}[{1}]".format(system_name, subsystem_name)
        eqpt_type = getSystemData(self.datafile, call_system_name,
                                  "eqpt_type")
        if eqpt_type is not False:
            if eqpt_type == "lab_pc":
                result, output_dict = self.pc_replacement(call_system_name)
            elif eqpt_type == "lab_testset":
                result, output_dict = \
                    self.testset_calibration(call_system_name)
            else:
                pNote("<eqpt_type>={0} provided for '{1}' is not "
                      "supported".format(eqpt_type, call_system_name),
                      "error")
        else:
            pNote("eqpt_type not provided for system={0}"
                  .format(call_system_name), "warn")
        status = status and result
    return status, output_dict
def check_tmp_file_exists(self, system_name="", filename=""):
    """Check if the temp file exists in the parallel execution tmp dir.

    When filename is not given it is read from the <filename> tag of
    system_name in the datafile; if neither argument is supplied an error
    is logged.

    :Arguments:
        1. system_name(str) : system whose <filename> tag supplies the name
        2. filename(str) : explicit file name (used as-is when provided)
    :Returns:
        1. (bool) : True if the file exists under parallel_exec_tmp_dir
    """
    if filename == "":
        if system_name != "":
            filename = data_Utils.getSystemData(self.datafile, system_name,
                                                "filename")
        else:
            pNote("No system or filename found, needs to provide at least one",
                  "error")
    tmp_dir = data_Utils.get_object_from_datarepository(
        "parallel_exec_tmp_dir")
    return file_Utils.fileExists(os.path.join(tmp_dir, filename))
def sub_from_wdf(datafile, string_list, td_sys_list=None, kw_system_name=None):
    """Substitute $wdf{system.tag} patterns in each string with values
    from the warrior datafile.

    The special system names "kw_system"/"current_system" resolve to
    kw_system_name and "target_system" resolves to the corresponding entry
    of td_sys_list (falling back to kw_system_name).

    :Arguments:
        1. datafile(str) : path to the warrior datafile
        2. string_list(list) : strings possibly containing $wdf{} patterns
        3. td_sys_list(list) : per-string target system names, optional
        4. kw_system_name(str) : system name of the current keyword, optional
    :Returns:
        1. newstring_list(list) : strings with patterns substituted
    """
    from Framework.Utils.data_Utils import getSystemData
    newstring_list = []
    for i, string in enumerate(string_list):
        td_sys = None if td_sys_list is None else td_sys_list[i]
        while string:
            match = re.search(r"(\$wdf\{)([^\}]*)(\})", string, re.IGNORECASE)
            if match is None:
                break
            value = None
            try:
                # BUGFIX: use capture group(2) (the text between the braces)
                # instead of str.strip("$wdf{"), which strips a *character
                # set* and corrupted names starting with '$','w','d','f','{'
                # (e.g. $wdf{dut.ip} became "ut.ip").
                wdf_match = match.group(2)
                system_or_subsystem = wdf_match.split(".")[0].strip()
                if system_or_subsystem in ("kw_system", "current_system"):
                    system_or_subsystem = kw_system_name
                elif system_or_subsystem == "target_system":
                    system_or_subsystem = td_sys if td_sys is not None \
                        else kw_system_name
                tag_or_attr = wdf_match.split(".")[1].strip()
                value = getSystemData(datafile, system_or_subsystem,
                                      tag_or_attr)
                if not value:
                    print_error("Value for '{0}' not provided in the datafile={1}"\
                                "under the system_or_subsystem = {2}".format(
                                    tag_or_attr, datafile, system_or_subsystem))
            except Exception:
                print_error("Incorrect format provided for substituting value"\
                            "from warrior datafile in the test data file")
                print_error(
                    "Correct format is $wdf{system_or_subsystemname.tag_or_attribute_name}"
                )
            if not value:
                # BUGFIX: previously a parse error or missing value left the
                # pattern in place and the while-loop re-matched it forever
                # (or crashed replacing with a non-string). Leave the
                # unresolved pattern in the string and move on.
                break
            string = string.replace(match.group(0), value)
        newstring_list.append(string)
    return newstring_list
def send_messages(self, system_name, topic, value, partition=None,
                  headers=None, timestamp=None, key=None):
    """
    This keyword publishes messages to the topic in given kafka broker
    Input data file Usage:
    <credentials>
      <system name="kafka_server1" type="kafka_producer">
        <ip>localhost</ip>
        <kafka_port>9092</kafka_port>
        <conn_type>kafka</conn_type>
      </system>
    </credentials>
    For complete list of supported parameters, check
    https://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html
    :Arguments:
        1.system_name(string) : kafka broker system name in input data file
        2.topic(string) : topic name to publish message
        3.value(string) : message to publish
        4.partition(int) : partition number, Optional
        5.headers(list) : list of headers
        6.timestamp(string) : timestamp
        7.key(string) : key for the message, Optional
    :Returns:
        1.status(bool) : True if message is published, else False
    """
    # BUGFIX: format args were (system_name, topic, value) while the
    # message text reads "value ... topic ... broker" -- reorder to match.
    wdesc = "publish value {} to topic {} in kafka broker {}".format(
        value, topic, system_name)
    pNote(wdesc)
    status = True
    # producer connection is created once and cached in the data repository
    if not data_repository.get("kafka_producer", None):
        print_info("creating kafka producer")
        conn_type = getSystemData(self.datafile, system_name, "conn_type")
        kafka_ip = getSystemData(self.datafile, system_name, "ip")
        kafka_port = getSystemData(self.datafile, system_name, "kafka_port")
        ca_file = getSystemData(self.datafile, system_name, "ssl_cafile")
        key_file = getSystemData(self.datafile, system_name, "ssl_keyfile")
        crl_file = getSystemData(self.datafile, system_name, "ssl_crlfile")
        ciphers = getSystemData(self.datafile, system_name, "ssl_ciphers")
        # BUGFIX: getSystemData returns False for a missing tag, so calling
        # conn_type.lower() unguarded raised AttributeError -- check
        # truthiness first.
        if not conn_type or conn_type.lower() != "kafka" or \
           not kafka_ip or not kafka_port:
            status = False
            print_error(
                "conn_type should be 'kafka' in system configuration and ip, \
                kafka_port should be provided")
            return status
        self.kafka_obj_producer = WarriorKafkaProducer(bootstrap_servers=\
            [kafka_ip+":"+kafka_port],
            ssl_cafile=ca_file,
            ssl_keyfile=key_file,
            ssl_crlfile=crl_file,
            ssl_ciphers=ciphers,
            value_serializer=\
            lambda x: dumps(x).encode('utf-8'))
        data_repository["kafka_producer"] = self.kafka_obj_producer
    else:
        self.kafka_obj_producer = data_repository["kafka_producer"]
    if not hasattr(self.kafka_obj_producer, "kafka_producer"):
        print_error("couldn't create connection to the kafka broker")
        result = False
        status = status and result
    else:
        result = self.kafka_obj_producer.send_messages(topic=topic,
                                                       value=value,
                                                       partition=partition,
                                                       headers=headers,
                                                       timestamp=timestamp,
                                                       key=key)
        if not result:
            print_error("couldn't publish message to topic")
        status = status and result
    return status
def get_messages(self, system_name, list_topics, group_id='my-group',
                 timeout=100, list_patterns=None, max_records=None,
                 get_all_messages=False):
    """
    This keyword gets the messages published to the specified topics
    Input data file Usage:
    <credentials>
      <system name="kafka_server1" type="kafka_consumer">
        <ip>localhost</ip>
        <kafka_port>9092</kafka_port>
        <conn_type>kafka</conn_type>
      </system>
    </credentials>
    For complete list of supported parameters, check
    https://kafka-python.readthedocs.io/en/master/apidoc/KafkaConsumer.html
    :Arguments:
        1.system_name(string) : kafka broker system name in inputdata file.
        2.list_topics(list) : list of topics to subscribe.
        3.group_id(string) : group id to use for subscription.
        4.timeout(int) : timeout in milliseconds.
        5.list_patterns(list) : list of patterns of topic names.
        6.max_records(int) : maximum records to fetch
        7.get_all_messages(bool) : True to fetch all messages in topic
    :Returns:
        1.status(bool) : True if messages are fetched successfully, else False
        2.output_dict : list of messages
    """
    wdesc = "get messages subscribed to topics : {}".format(list_topics)
    pNote(wdesc)
    status = True
    output_dict = {}
    # consumer connection is created once and cached in the data repository
    if not data_repository.get("kafka_consumer", None):
        print_info("creating kafka consumer")
        conn_type = getSystemData(self.datafile, system_name, "conn_type")
        kafka_ip = getSystemData(self.datafile, system_name, "ip")
        kafka_port = getSystemData(self.datafile, system_name, "kafka_port")
        ca_file = getSystemData(self.datafile, system_name, "ssl_cafile")
        key_file = getSystemData(self.datafile, system_name, "ssl_keyfile")
        crl_file = getSystemData(self.datafile, system_name, "ssl_crlfile")
        ciphers = getSystemData(self.datafile, system_name, "ssl_ciphers")
        # BUGFIX: getSystemData returns False for a missing tag, so calling
        # conn_type.lower() unguarded raised AttributeError -- check
        # truthiness first (consistent with send_messages).
        if not conn_type or conn_type.lower() != "kafka" or \
           not kafka_ip or not kafka_port:
            status = False
            print_error(
                "conn_type should be 'kafka' in system configuration and ip, \
                kafka_port should be provided")
            return status
        self.kafka_obj_consumer = WarriorKafkaConsumer(bootstrap_servers=\
            [kafka_ip+":"+kafka_port],
            ssl_cafile=ca_file,
            ssl_keyfile=key_file,
            ssl_crlfile=crl_file,
            ssl_ciphers=ciphers,
            group_id=group_id,
            auto_offset_reset='earliest',
            value_deserializer=\
            lambda x: loads(x.decode('utf-8')))
        data_repository["kafka_consumer"] = self.kafka_obj_consumer
    else:
        self.kafka_obj_consumer = data_repository["kafka_consumer"]
    if not hasattr(self.kafka_obj_consumer, "kafka_consumer"):
        print_error("couldn't create connection to the kafka broker")
        result = False
        status = status and result
    else:
        # (re)subscribe only when the cached consumer is missing any of
        # the requested topics
        subscribe_required = False
        assigned_topics = self.kafka_obj_consumer.get_topics()
        if not assigned_topics:
            subscribe_required = True
        else:
            for topic in list_topics:
                if topic not in str(assigned_topics):
                    subscribe_required = True
        if subscribe_required:
            result = self.kafka_obj_consumer.subscribe_to_topics(
                topics=list_topics,
                pattern=list_patterns)
            if not result:
                print_error("cannot subscribe to topics")
            status = status and result
    if status:
        messages = self.kafka_obj_consumer.get_messages(
            timeout=timeout,
            max_records=max_records,
            get_all_messages=get_all_messages)
        print_info(
            "messages received from subscribed topics {}".format(messages))
        if messages:
            # timestamped key keeps repeated fetches distinct in the repo
            time_stamp = int(time.time())
            output_dict["kafka_messages_{}".format(time_stamp)] = messages
    return status, output_dict