def create_sub_tmp_file(self, system_name="", filename="", delete="yes"):
    """ Create a temp file for the parallel execution test """
    path = data_Utils.get_object_from_datarepository("parallel_exec_tmp_dir")
    if system_name != "" and filename == "":
        filename = data_Utils.getSystemData(self.datafile, system_name,
                                            "filename")
    elif system_name == "" and filename == "":
        pNote("No system or filename provided, at least one is needed",
              "error")
    # write a small marker file into the parallel execution tmp dir
    with open(os.path.join(path, filename), "w") as fobj:
        fobj.write("This is a test string")
    time.sleep(10)
    status = False
    if delete == "yes":
        try:
            file_Utils.delFile(os.path.join(path, filename))
            status = True
        except OSError:
            pNote("Cannot remove tmp file, no write access to {}".format(path),
                  "error")
    else:
        status = True
    return status
def check_tmp_file_exists(self, system_name="", filename=""):
    """ Check if the temp file exists in the parallel execution result tmp dir """
    if system_name != "" and filename == "":
        filename = data_Utils.getSystemData(self.datafile, system_name,
                                            "filename")
    elif system_name == "" and filename == "":
        pNote("No system or filename provided, at least one is needed",
              "error")
    path = data_Utils.get_object_from_datarepository("parallel_exec_tmp_dir")
    path = os.path.join(path, filename)
    return file_Utils.fileExists(path)
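# Illustrative sketch (not part of the keyword library): the os-level operations
# that create_sub_tmp_file and check_tmp_file_exists wrap. The directory and file
# name below are hypothetical; file_Utils.fileExists roughly corresponds to
# os.path.isfile and file_Utils.delFile to os.remove.
import os

def _example_tmp_file_roundtrip(tmp_dir="/tmp/parallel_exec_tmp_dir"):
    path = os.path.join(tmp_dir, "example.txt")
    with open(path, "w") as fobj:          # create the marker file
        fobj.write("This is a test string")
    exists = os.path.isfile(path)          # what check_tmp_file_exists verifies
    os.remove(path)                        # what the delete="yes" branch does
    return exists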
def check_lab_equipment(self, system_name):
    """
    Call the pc_replacement or testset_calibration keyword to validate
    whether the lab PC replacement or test set calibration is up to date.
    """
    wdesc = "Check lab PC replacement or test set calibration status"
    pNote("KEYWORD: check_lab_equipment | Description: {0}".format(wdesc))
    # Resolve system_name and subsystem_list
    system_name, subsystem_list = Utils.data_Utils.resolve_system_subsystem_list(
        self.datafile, system_name)
    output_dict = {}
    status = True
    attempt = 1 if subsystem_list is None else len(subsystem_list)
    for i in range(attempt):
        result = False
        subsystem_name = subsystem_list[i] if subsystem_list is not None else None
        # Put system_name in system_name[subsystem] format before calling
        # the checking function
        call_system_name = system_name if subsystem_name is None \
            else "{0}[{1}]".format(system_name, subsystem_name)
        eqpt_type = getSystemData(self.datafile, call_system_name, "eqpt_type")
        if eqpt_type is not False:
            if eqpt_type == "lab_pc":
                result, output_dict = self.pc_replacement(call_system_name)
            elif eqpt_type == "lab_testset":
                result, output_dict = self.testset_calibration(call_system_name)
            else:
                pNote("<eqpt_type>={0} provided for '{1}' is not "
                      "supported".format(eqpt_type, call_system_name), "error")
        else:
            pNote("eqpt_type not provided for system={0}"
                  .format(call_system_name), "warn")
        status = status and result
    return status, output_dict
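# Illustrative sketch (not part of the keyword library): how check_lab_equipment
# builds the "system[subsystem]" name that is passed on to pc_replacement or
# testset_calibration. The names below are hypothetical.
def _example_call_name(system_name, subsystem_name=None):
    return system_name if subsystem_name is None \
        else "{0}[{1}]".format(system_name, subsystem_name)

# _example_call_name("lab_sys1")         -> "lab_sys1"
# _example_call_name("lab_sys1", "ss1")  -> "lab_sys1[ss1]"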
def send_messages(self, system_name, topic, value, partition=None,
                  headers=None, timestamp=None, key=None):
    """
    This keyword publishes messages to the topic in the given kafka broker

    Input data file usage:
        <credentials>
            <system name="kafka_server1" type="kafka_producer">
                <ip>localhost</ip>
                <kafka_port>9092</kafka_port>
            </system>
        </credentials>

    For the complete list of supported parameters, check
    https://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html

    :Arguments:
        1.system_name(string) : kafka broker system name in the input data file
        2.topic(string) : topic name to publish the message to
        3.value(string) : message to publish
        4.partition(int) : partition number, Optional
        5.headers(list) : list of headers
        6.timestamp(string) : timestamp
        7.key(string) : key for the message, Optional
    :Returns:
        1.status(bool) : True if the message is published, else False
    """
    wdesc = "publish value {} to topic {} in kafka broker {}".format(
        value, topic, system_name)
    pNote("KEYWORD: send_messages | Description: {0}".format(wdesc))
    status = True
    if not data_repository.get("kafka_producer", None):
        print_info("creating kafka producer")
        # mapper file will be available for netconf backup
        map_file = data_repository.get('wt_mapfile', None)
        if map_file:
            ca_file = key_file = crl_file = ciphers = None
            config = ConfigObj(map_file)
            mapper = (config.get('CREDENTIALS', None).get('kafka_server', None)
                      if config.get('CREDENTIALS', None) else None)
            if not mapper:
                return False
            status, mapper_data = Utils.data_Utils.replace_var(mapper, {}, {})
            if not status:
                return status
            kafka_ip = config["CREDENTIALS"]["kafka_server"].get("kafka_host", None)
            kafka_port = config["CREDENTIALS"]["kafka_server"].get("kafka_port", None)
            ca_file = config["CREDENTIALS"]["kafka_server"].get("ca_file", None)
            key_file = config["CREDENTIALS"]["kafka_server"].get("key_file", None)
            crl_file = config["CREDENTIALS"]["kafka_server"].get("crl_file", None)
            ciphers = config["CREDENTIALS"]["kafka_server"].get("ciphers", None)
        else:
            kafka_ip = getSystemData(self.datafile, system_name, "ip")
            kafka_port = getSystemData(self.datafile, system_name, "kafka_port")
            ca_file = getSystemData(self.datafile, system_name, "ssl_cafile")
            key_file = getSystemData(self.datafile, system_name, "ssl_keyfile")
            crl_file = getSystemData(self.datafile, system_name, "ssl_crlfile")
            ciphers = getSystemData(self.datafile, system_name, "ssl_ciphers")
        if not kafka_ip or not kafka_port:
            status = False
            print_error("ip and kafka_port should be provided")
            return status
        self.kafka_obj_producer = WarriorKafkaProducer(
            bootstrap_servers=[kafka_ip + ":" + kafka_port],
            acks='all',
            request_timeout_ms=1000000,
            api_version_auto_timeout_ms=1000000,
            ssl_cafile=ca_file,
            ssl_keyfile=key_file,
            ssl_crlfile=crl_file,
            ssl_ciphers=ciphers,
            value_serializer=lambda x: dumps(x).encode('utf-8'))
    else:
        self.kafka_obj_producer = data_repository["kafka_producer"]
    # handle both string and dict inputs: evaluate dict-like strings,
    # otherwise keep the value as-is
    try:
        value = eval(value)
    except Exception:
        pass
    if not hasattr(self.kafka_obj_producer, "kafka_producer"):
        print_error("couldn't create connection to the kafka broker")
        result = False
        status = status and result
    else:
        data_repository["kafka_producer"] = self.kafka_obj_producer
        result = self.kafka_obj_producer.send_messages(topic=topic,
                                                       value=value,
                                                       partition=partition,
                                                       headers=headers,
                                                       timestamp=timestamp,
                                                       key=key)
        if not result:
            print_error("couldn't publish message to topic")
        status = status and result
    return status
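# Illustrative sketch (not part of the keyword library): reading Kafka connection
# details from a mapper file with ConfigObj, as the 'wt_mapfile' branch above does.
# The file name, section names and keys below are assumptions for illustration; a
# mapper file would look roughly like:
#   [CREDENTIALS]
#     [[kafka_server]]
#       kafka_host = localhost
#       kafka_port = 9092
from configobj import ConfigObj

def _example_read_mapper(map_file="kafka_mapper.cfg"):
    config = ConfigObj(map_file)
    kafka_server = config.get("CREDENTIALS", {}).get("kafka_server", {})
    return kafka_server.get("kafka_host"), kafka_server.get("kafka_port")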
def get_messages(self, system_name, list_topics, group_id='my-group',
                 timeout=100, list_patterns=None, max_records=None,
                 get_all_messages=False):
    """
    This keyword gets the messages published to the specified topics

    Input data file usage:
        <credentials>
            <system name="kafka_server1" type="kafka_consumer">
                <ip>localhost</ip>
                <kafka_port>9092</kafka_port>
            </system>
        </credentials>

    For the complete list of supported parameters, check
    https://kafka-python.readthedocs.io/en/master/apidoc/KafkaConsumer.html

    :Arguments:
        1.system_name(string) : kafka broker system name in the input data file
        2.list_topics(list) : list of topics to subscribe to
        3.group_id(string) : group id to use for the subscription
        4.timeout(int) : timeout in milliseconds
        5.list_patterns(list) : list of patterns of topic names
        6.max_records(int) : maximum number of records to fetch
        7.get_all_messages(bool) : True to fetch all messages in the topic
    :Returns:
        1.status(bool) : True if messages are fetched successfully, else False
        2.output_dict : list of messages
    """
    wdesc = "get messages from subscribed topics : {}".format(list_topics)
    pNote("KEYWORD: get_messages | Description: {0}".format(wdesc))
    status = True
    output_dict = {}
    if not data_repository.get("kafka_consumer", None):
        print_info("creating kafka consumer")
        kafka_ip = getSystemData(self.datafile, system_name, "ip")
        kafka_port = getSystemData(self.datafile, system_name, "kafka_port")
        ca_file = getSystemData(self.datafile, system_name, "ssl_cafile")
        key_file = getSystemData(self.datafile, system_name, "ssl_keyfile")
        crl_file = getSystemData(self.datafile, system_name, "ssl_crlfile")
        ciphers = getSystemData(self.datafile, system_name, "ssl_ciphers")
        if not kafka_ip or not kafka_port:
            status = False
            print_error("ip and kafka_port should be provided")
            return status
        self.kafka_obj_consumer = WarriorKafkaConsumer(
            bootstrap_servers=[kafka_ip + ":" + kafka_port],
            ssl_cafile=ca_file,
            ssl_keyfile=key_file,
            ssl_crlfile=crl_file,
            ssl_ciphers=ciphers,
            group_id=group_id,
            auto_offset_reset='earliest',
            value_deserializer=lambda x: loads(x.decode('utf-8')))
    else:
        self.kafka_obj_consumer = data_repository["kafka_consumer"]
    if not hasattr(self.kafka_obj_consumer, "kafka_consumer"):
        print_error("couldn't create connection to the kafka broker")
        result = False
        status = status and result
    else:
        data_repository["kafka_consumer"] = self.kafka_obj_consumer
        # subscribe only if the consumer is not already assigned these topics
        subscribe_required = False
        assigned_topics = self.kafka_obj_consumer.get_topics()
        if not assigned_topics:
            subscribe_required = True
        else:
            for topic in list_topics:
                if topic not in str(assigned_topics):
                    subscribe_required = True
        if subscribe_required:
            result = self.kafka_obj_consumer.subscribe_to_topics(
                topics=list_topics, pattern=list_patterns)
            if not result:
                print_error("cannot subscribe to topics")
            status = status and result
    if status:
        messages = self.kafka_obj_consumer.get_messages(
            timeout=timeout, max_records=max_records,
            get_all_messages=get_all_messages)
        print_info("messages received from subscribed topics {}".format(messages))
        if messages:
            time_stamp = int(time.time())
            output_dict["kafka_messages_{}".format(time_stamp)] = messages
    return status, output_dict
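# Illustrative sketch (not part of the keyword library): consuming JSON-encoded
# messages with kafka-python directly, mirroring what WarriorKafkaConsumer is set
# up to do above. The broker address and topic name are hypothetical.
from json import loads
from kafka import KafkaConsumer

def _example_consume(topic="test_topic", broker="localhost:9092"):
    consumer = KafkaConsumer(topic,
                             bootstrap_servers=[broker],
                             group_id="my-group",
                             auto_offset_reset="earliest",
                             consumer_timeout_ms=5000,
                             value_deserializer=lambda x: loads(x.decode('utf-8')))
    # iteration stops once consumer_timeout_ms elapses with no new messages
    return [message.value for message in consumer]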
def send_messages(self, system_name, topic, value, partition=None,
                  headers=None, timestamp=None, key=None):
    """
    This keyword publishes messages to the topic in the given kafka broker

    Input data file usage:
        <credentials>
            <system name="kafka_server1" type="kafka_producer">
                <ip>localhost</ip>
                <kafka_port>9092</kafka_port>
            </system>
        </credentials>

    For the complete list of supported parameters, check
    https://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html

    :Arguments:
        1.system_name(string) : kafka broker system name in the input data file
        2.topic(string) : topic name to publish the message to
        3.value(string) : message to publish
        4.partition(int) : partition number, Optional
        5.headers(list) : list of headers
        6.timestamp(string) : timestamp
        7.key(string) : key for the message, Optional
    :Returns:
        1.status(bool) : True if the message is published, else False
    """
    wdesc = "publish value {} to topic {} in kafka broker {}".format(
        value, topic, system_name)
    pNote(wdesc)
    status = True
    if not data_repository.get("kafka_producer", None):
        print_info("creating kafka producer")
        kafka_ip = getSystemData(self.datafile, system_name, "ip")
        kafka_port = getSystemData(self.datafile, system_name, "kafka_port")
        ca_file = getSystemData(self.datafile, system_name, "ssl_cafile")
        key_file = getSystemData(self.datafile, system_name, "ssl_keyfile")
        crl_file = getSystemData(self.datafile, system_name, "ssl_crlfile")
        ciphers = getSystemData(self.datafile, system_name, "ssl_ciphers")
        if not kafka_ip or not kafka_port:
            status = False
            print_error("ip and kafka_port should be provided")
            return status
        self.kafka_obj_producer = WarriorKafkaProducer(
            bootstrap_servers=[kafka_ip + ":" + kafka_port],
            ssl_cafile=ca_file,
            ssl_keyfile=key_file,
            ssl_crlfile=crl_file,
            ssl_ciphers=ciphers,
            value_serializer=lambda x: dumps(x).encode('utf-8'))
        data_repository["kafka_producer"] = self.kafka_obj_producer
    else:
        self.kafka_obj_producer = data_repository["kafka_producer"]
    # handle both string and dict inputs: evaluate dict-like strings,
    # otherwise keep the value as-is
    try:
        value = eval(value)
    except Exception:
        pass
    if not hasattr(self.kafka_obj_producer, "kafka_producer"):
        print_error("couldn't create connection to the kafka broker")
        result = False
        status = status and result
    else:
        result = self.kafka_obj_producer.send_messages(topic=topic,
                                                       value=value,
                                                       partition=partition,
                                                       headers=headers,
                                                       timestamp=timestamp,
                                                       key=key)
        if not result:
            print_error("couldn't publish message to topic")
        status = status and result
    return status
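# Illustrative sketch (not part of the keyword library): publishing a JSON-encoded
# message with kafka-python directly, mirroring what WarriorKafkaProducer is set up
# to do above. The broker address and topic name are hypothetical.
from json import dumps
from kafka import KafkaProducer

def _example_publish(topic="test_topic", broker="localhost:9092"):
    producer = KafkaProducer(bootstrap_servers=[broker],
                             value_serializer=lambda x: dumps(x).encode('utf-8'))
    future = producer.send(topic, value={"msg": "hello"})
    record_metadata = future.get(timeout=10)   # wait for the broker to ack
    producer.flush()
    return record_metadata.topic, record_metadata.partition, record_metadata.offset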
def update_mongo_db(self, system_name, collection, update_json=None,
                    filter_json=None):
    ''' Update or insert a doc in a Mongo db collection '''
    wdesc = "Update or Insert doc in Mongo db collection"
    status, output, dictionary = False, {}, {}
    try:
        # mongo connection parameters
        host = getSystemData(self.datafile, system_name, "ip")
        port = getSystemData(self.datafile, system_name, "port")
        database = getSystemData(self.datafile, system_name, "db")
        # username = getSystemData(self.datafile, system_name, "username")
        # password = getSystemData(self.datafile, system_name, "password")
        collection = get_object_from_datarepository(collection)
        client = MongoClient(host, int(port))
        db = client[database]
        col = db[collection]
        if update_json and filter_json:
            update_json = get_object_from_datarepository(update_json)
            filter_json = get_object_from_datarepository(filter_json)
            updated_doc = col.find_one(filter_json)
            if not updated_doc:
                print_error("no doc matched the given filter '{}' in "
                            "collection '{}'".format(filter_json, collection))
            else:
                result = col.update_one(filter_json, update_json)
                if result:
                    print_info('successfully updated doc in mongo db '
                               'collection : {}'.format(collection))
                    updated_doc = col.find_one(filter_json)
                    print_info("updated doc : {}".format(updated_doc))
                    status = True
                else:
                    print_error('cannot update doc in mongo db')
        elif update_json and not filter_json:
            update_json = get_object_from_datarepository(update_json)
            result = col.insert_one(update_json)
            if result:
                status = True
                print_info('successfully updated mongo db collection : {}, '
                           '_id : {}'.format(collection, result.inserted_id))
            else:
                print_error('cannot insert doc in mongo db')
        elif not update_json and filter_json:
            filter_json = get_object_from_datarepository(filter_json)
            result = col.find_one(filter_json)
            if result:
                status = True
                print_info('successfully found doc with given filter')
            else:
                print_error('cannot find doc in mongo db')
        else:
            print_error('at least one of update_doc or filter_doc is mandatory')
    except Exception as err:
        print_error("ERROR: error while updating db: {}".format(err))
        update_datarepository(
            {"error_description": 'Error while updating mongo db'})
    return status, output
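# Illustrative sketch (not part of the keyword library): the pymongo calls that
# update_mongo_db wraps for its update / insert / find branches. The host, port,
# database, collection and documents below are hypothetical.
from pymongo import MongoClient

def _example_update_or_insert():
    client = MongoClient("localhost", 27017)
    col = client["test_db"]["test_collection"]
    filter_json = {"name": "device1"}
    update_json = {"$set": {"status": "calibrated"}}
    if col.find_one(filter_json):
        col.update_one(filter_json, update_json)                      # update branch
    else:
        col.insert_one({"name": "device1", "status": "calibrated"})   # insert branch
    return col.find_one(filter_json)                                  # find branch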