def logical_decision(exec_condition, exec_cond_var, operator="eq"):
    """
    Handle the logical decision for the value comparison
    :param:
        exec_condition: value from data_repo to be compared
        exec_cond_var: user provided value to be compared
        operator: math operator in plain English
    :return:
        True if condition match, else return False
    """
    status = True
    result = None
    # Compare the type of the repo-stored value against the user value;
    # a mismatch disables every comparison branch below.
    if type(get_object_from_datarepository(exec_condition)) != type(exec_cond_var):
        pNote("Comparing different type of value, please check the conditional value type", "ERROR")
        status = False
    # NOTE(review): this checks isinstance on exec_condition (the repo KEY),
    # not on the value fetched from the repo. Since the key is a string (it is
    # later used with .startswith), this elif appears to always fire for math
    # operators and make the MATH_OPERATION branch below unreachable —
    # TODO confirm whether the intent was to check the repo value instead.
    elif operator in MATH_OPERATION and\
         not isinstance(exec_condition, int) and not isinstance(exec_condition, float):
        pNote("Comparing non-numerical value using numerical operator,"\
              "please check value and operator type", "ERROR")
        status = False
    if status and operator == "eq":
        result = True if get_object_from_datarepository(exec_condition) == exec_cond_var else False
    elif status and operator == "ne":
        result = True if get_object_from_datarepository(exec_condition) != exec_cond_var else False
    elif status and operator in MATH_OPERATION:
        # Key names prefixed "float_" are compared as floats, others as ints.
        result, _ = verify_data(exec_cond_var, exec_condition,
                                "float" if exec_condition.startswith("float_") else "int",
                                operator)
    else:
        # Reached both for unknown operators and for any failed type check;
        # result stays None in that case (falsy, treated as failure by callers).
        pNote("Execution condition failed for expected value: {} , operator: {}, actual value: {}"\
              .format(exec_cond_var, operator, get_object_from_datarepository(exec_condition)),
              "WARNING")
    return result
def generate_timestamp_delta(self, stored_delta_key, timestamp_key, desired_status):
    """test keyword created for runmode_timer

    Compare the current time with the previously stored timestamp and, if a
    previous timestamp exists, append the computed delta to the stored list.
    Both the fresh timestamp and the (possibly extended) delta list are
    returned in a dict so the framework stores them back in the data repo for
    the keyword verify_delta.

    :Argument:
        stored_delta_key = key name to store the list of delta
        timestamp_key = key name to store the timestamp
        desired_status = user desired status input
                         pass->true, fail->false and everything else ->exception
    """
    now = datetime_utils.get_current_timestamp()
    outcome = self.local_data_test(desired_status)
    repo_update = {timestamp_key: now}
    last_seen = data_Utils.get_object_from_datarepository(timestamp_key)
    deltas = data_Utils.get_object_from_datarepository(stored_delta_key)
    if last_seen:
        gap = datetime_utils.get_time_delta(last_seen, now)
        # Start a fresh list on the first delta, extend it afterwards.
        deltas = deltas + [gap] if deltas else [gap]
        repo_update[stored_delta_key] = deltas
    return outcome, repo_update
def close(self, system_name=None, external_system=None, external_system_session=None):
    """
    close the gnmi streaming or polling by sending Ctrl+C to the session.

    :param system_name: system whose "<name>_gnmi_session" object is in the data repo
    :param external_system: external system mentioned in the data file
    :param external_system_session: external system session if any
    :return: (status, result) - status True when the Ctrl+C output was read
             cleanly; result is the captured text (None if nothing captured)
    """
    status = False
    # Bug fix: `result` was previously assigned only inside the success branch,
    # causing an UnboundLocalError at `return` when the error string was found.
    result = None
    if system_name:
        __gnmi_obj = data_Utils.get_object_from_datarepository(str(system_name)+"_gnmi_session")
        if __gnmi_obj:
            gnmi_obj = __gnmi_obj
        else:
            gnmi_obj = None
    else:
        session_id = data_Utils.get_session_id(external_system, external_system_session)
        gnmi_obj = data_Utils.get_object_from_datarepository(session_id)
    gnmi_obj.sendcontrol('C')
    try:
        # Wait briefly for either EOF or a shell prompt after the interrupt.
        gnmi_obj.expect([pexpect.EOF, '.*(%|#|\$)'], timeout=2)
    except Exception:
        # Narrowed from a bare except: still covers pexpect.TIMEOUT etc.
        testcase_Utils.pNote("Sending Ctrl+C")
    if "client had error while displaying results" not in gnmi_obj.before:
        if system_name:
            result = gnmi_obj.before.strip().strip("^C")
            testcase_Utils.pNote(result)
        else:
            result = gnmi_obj.after.strip().strip("^C")
            testcase_Utils.pNote(result)
        status = True
    return status, result
def increase_value(self, key, status, max_value, max_status):
    """
    Increment an integer counter stored in the data repo under `key`.

    :param key: data repo key holding the counter
    :param status: "pass"/"fail" -> returned as True/False; anything else raises
    :param max_value: when the new count equals int(max_value), `max_status`
                      is used instead of `status`
    :param max_status: status to use once the counter reaches max_value
    :return: (status(bool), {key: new_count}) - the dict is written back to
             the repo by the framework
    """
    value = data_Utils.get_object_from_datarepository(key)
    # The original also had an `if key == False` branch, but `key` is the repo
    # key string and can never be False; both paths reduce to this expression.
    # (Note: bool is a subclass of int, so a repo value of False yields 1,
    # exactly as before.)
    num = value + 1 if isinstance(value, int) else 1
    if num == int(max_value):
        status = max_status
    if status == "pass":
        status = True
    elif status == "fail":
        status = False
    else:
        raise Exception(
            "This is raised in ci_regression_actions.increase_value")
    output_dict = {key: num}
    return status, output_dict
def create_sub_tmp_file(self, system_name="", filename="", delete="yes"):
    """ Create temp file for parallel execution test

    :param system_name: system in the datafile whose "filename" tag supplies
                        the file name when `filename` is not given
    :param filename: explicit file name (takes precedence over system_name)
    :param delete: "yes" (default) removes the file again after a 10s pause
    :return: status (bool) - False when deletion was requested but failed
    """
    path = data_Utils.get_object_from_datarepository(
        "parallel_exec_tmp_dir")
    if system_name != "" and filename == "":
        filename = data_Utils.getSystemData(self.datafile, system_name, "filename")
    elif system_name == "" and filename == "":
        pNote("No system or filename found, needs to provide at least one", "error")
    # Fix: use a context manager so the handle is closed even if write() fails.
    with open(os.path.join(path, filename), "w") as f:
        f.write("This is a test string")
    # Deliberate pause so parallel executions can observe the file.
    time.sleep(10)
    status = False
    if delete == "yes":
        try:
            file_Utils.delFile(os.path.join(path, filename))
            status = True
        except OSError:
            pNote(
                "Cannot remove tmp file, no write access to {}".format(
                    path), "error")
    else:
        status = True
    return status
def delete_tmp_dir(self):
    """ Delete temp directory for parallel execution test

    The directory path is read from the "parallel_exec_tmp_dir" key that an
    earlier setup step published to the data repository.

    :return: result of file_Utils.delFolder (presumably True/False —
             TODO confirm against file_Utils)
    """
    path = data_Utils.get_object_from_datarepository(
        "parallel_exec_tmp_dir")
    return file_Utils.delFolder(path)
def get_element_from_config_file(config_file, element_tag, child_tag,
                                 is_locator_type="no", default=False):
    """ Gets default locators from json file

    :param config_file: json file path (made absolute relative to the
                        datafile directory when 'wt_datafile' is in the repo)
    :param element_tag: optional parent path segment inside the json
    :param child_tag: key to read; when it is one of the known selenium
                      locator types the value is looked up directly
    :param is_locator_type: "yes" wraps the result as [locator_type, value]
    :param default: fallback passed through to get_json_value_from_path
    :return: looked-up value, [locator_type, value], or None/[None, None]
             when no config file is available
    """
    locator_types = ("xpath", "id", "css", "link", "partial_link",
                     "tag", "class", "name")
    wt_datafile = data_Utils.get_object_from_datarepository('wt_datafile')
    if wt_datafile:
        # Resolve relative config paths against the datafile's directory.
        config_file = file_Utils.getAbsPath(config_file, os.path.dirname(wt_datafile))
    if config_file is not None and config_file is not False:
        if child_tag in locator_types:
            # child_tag is itself a locator type: read its value directly.
            if element_tag is not None:
                final_value = get_json_value_from_path(element_tag + "/" + child_tag,
                                                       config_file, default)
            else:
                final_value = get_json_value_from_path(child_tag, config_file, default)
            if is_locator_type == "yes":
                final_value = [child_tag, final_value]
        else:
            if child_tag is not None:
                # Arbitrary (non-locator) key: same path lookup, no wrapping.
                if element_tag is not None:
                    final_value = get_json_value_from_path(element_tag + "/" + child_tag,
                                                           config_file, default)
                else:
                    final_value = get_json_value_from_path(child_tag, config_file, default)
            else:
                # No child_tag at all: fall back to the first locator-type
                # entry found under the element (or at top level).
                if element_tag is not None:
                    final_value = get_default_tag_for_locs(config_file, element_tag,
                                                           locator_types)
                else:
                    final_value = get_default_tag_for_locs(config_file, child_tag,
                                                           locator_types)
    else:
        # No usable config file: shape of the sentinel matches is_locator_type.
        if is_locator_type == "yes":
            final_value = [None, None]
        else:
            final_value = None
    return final_value
def create_display():
    """
    Create a virtual display

    Default size is 1920x1080 as smaller resolution may cause problem
    in firefox

    :return: True when a display already exists or was started, False when
             pyvirtualdisplay is missing or startup raised.
    """
    # A display created earlier in this run is flagged in the data repo.
    if data_Utils.get_object_from_datarepository("headless_display"):
        return True
    try:
        from pyvirtualdisplay import Display
        # Selenium has problem with firefox in virtualdisplay if resolution is low
        virt_display = Display(visible=0, size=(1920, 1080))
        virt_display.start()
        print_info("Running in headless mode")
    except ImportError:
        print_error("pyvirtualdisplay is not installed in order "
                    "to launch the browser in headless mode")
        return False
    except Exception as err:
        print_error(
            "Encountered Exception: {0}, while trying to launch the browser"
            " in headless mode".format(err))
        return False
    return True
def cs_create_route_in_reservation(self, system_name, reservation_name,
                                   source_resource_full_path,
                                   target_resource_full_path,
                                   override_active_routes, mapping_type,
                                   max_hops, route_alias, is_shared):
    """Creates routes between the specified source and target resources.
    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. source_resource_full_path(string) = Specify the source resource full path
        4. target_resource_full_path(string) = Specify the target resource full path
        5. mapping_type(string) = Specify bi-directional or uni-directional as the mapping type
        6. max_hops(integer) = The maximum number of allowed hops.
        7. route_alias(string) = Specify the route alias
        8. override_active_routes(bool) = Specify whether the new route can override existing routes.
        9. is_shared(bool) = Specify whether these routes are shared.
    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Create Route In Reservation in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote(
        "cs_create_route_in_reservation, cs obj-{}".format(cloud_shell), "info")
    status = False
    # The reservation id was stored in the data repo by a prior
    # reservation-creation keyword under "<system>_<reservation>_reservationId".
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    try:
        xml_resp = cloud_shell.CreateRouteInReservation(
            cs_res_id, source_resource_full_path, target_resource_full_path,
            override_active_routes, mapping_type, int(max_hops), route_alias,
            is_shared)
        # A non-None response is treated as success; the payload itself
        # is not inspected further.
        if xml_resp is not None:
            testcase_Utils.pNote(
                "\n\n *** Cloudshell Create Route In"
                " Reservation successfull for \"{}\"\n".format(
                    reservation_name), "info")
            status = True
        else:
            testcase_Utils.pNote(
                "\n\n *** Cloudshell Create Route In"
                " Reservation failed for \"{}\"".format(reservation_name),
                "warning")
    except Exception as exception:
        # Failure is logged; status stays False.
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status
def tmp_file_count(self, int_count):
    """ count how many files are under the temp dir

    Sleeps 5 seconds first so parallel writers can finish, then compares
    the number of directory entries with `int_count`.

    :param int_count: expected number of entries
    :return: True when the counts match
    """
    time.sleep(5)
    tmp_dir = data_Utils.get_object_from_datarepository(
        "parallel_exec_tmp_dir")
    entries = os.listdir(tmp_dir)
    pNote(entries)
    pNote(str(len(entries)) + str(int_count))
    return len(entries) == int_count
def scp_client_ca(self, cmd_string, passwd, external_system=None,
                  external_system_session=None):
    """
    Perform scp/sftp operation
    :param cmd_string: scp/sftp command string
    :param passwd: remote system password
    :param external_system: external system mentioned in the data file
    :param external_system_session: external system session if any
    :return: True or False
    """
    status = False
    if external_system == None:
        # No external session: run the command in a fresh local pexpect child.
        child = pexpect.spawn(cmd_string)
    else:
        # Reuse an already-connected session stored in the data repo.
        session_id = data_Utils.get_session_id(external_system,
                                               external_system_session)
        child = data_Utils.get_object_from_datarepository(session_id)
        child.sendline(cmd_string)
    # Drive the interactive copy: answer the password / host-key prompts and
    # decide success by looking for the "100%" progress marker.
    while 1:
        try:
            u_index = child.expect(['password', 'Permission denied', pexpect.EOF,
                                    'want to continue connecting',
                                    '.*100%.*(%|#|\$)',
                                    'Connection reset by peer',
                                    pexpect.TIMEOUT], timeout=20)
            if u_index == 0:
                # Password prompt: supply the password and keep looping.
                child.sendline(passwd)
                testcase_Utils.pNote(child.before+child.match.group(0))
            if u_index == 1:
                status = False
                testcase_Utils.pNote(child.before+child.match.group(0)+child.after)
                break
            if u_index == 2:
                # EOF (local spawn finished): success only if 100% was seen.
                testcase_Utils.pNote(child.before)
                if "100%" in child.before:
                    status = True
                break
            if u_index == 4:
                # Copy completed inside an existing shell session.
                testcase_Utils.pNote(child.before+child.match.group(0))
                if "100%" in child.before+child.match.group(0):
                    status = True
                break
            if u_index == 3:
                # First-connection host-key confirmation.
                testcase_Utils.pNote(child.before+child.match.group(0)+child.after)
                child.sendline('yes')
            if u_index == 5:
                testcase_Utils.pNote(child.before+child.match.group(0))
                status = False
                break
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # kept as-is to preserve behavior.
            testcase_Utils.pNote("File Copy Failed", "error")
            status = False
            break
    if status:
        testcase_Utils.pNote("Client certificate copied Successfully.")
    else:
        testcase_Utils.pNote("Client certificate copy Failed.", "error")
    return status
def check_tmp_file_exists(self, system_name="", filename=""):
    """ check if temp folder exist in the parallel execution result tmp dir

    :param system_name: datafile system whose "filename" tag is used when no
                        explicit filename is given
    :param filename: explicit file name (takes precedence)
    :return: True when the file exists under the parallel-exec tmp dir
    """
    if filename == "":
        if system_name != "":
            filename = data_Utils.getSystemData(self.datafile, system_name,
                                                "filename")
        else:
            pNote("No system or filename found, needs to provide at least one",
                  "error")
    base_dir = data_Utils.get_object_from_datarepository(
        "parallel_exec_tmp_dir")
    return file_Utils.fileExists(os.path.join(base_dir, filename))
def cs_remove_route_from_reservation(self, system_name, reservation_name,
                                     first_endpoint, second_endpoint,
                                     mapping_type):
    """Disconnects two endpoints and removes the mapped route between them
    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. first_endpoint(str) = The first endpoint of the two end points
        4. second_endpoint(str) = The second endpoint of the two end points
        5. mapping_type(string) = Specify bi-directional or uni-directional
           as the mapping type
    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Remove Route From Reservation in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote(
        "cs_remove_route_from_reservation, cs obj-{}".format(cloud_shell),
        "info")
    status = False
    # Reservation id was published to the repo by the reservation-creation step.
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    list_of_endpoints = [first_endpoint, second_endpoint]
    try:
        xml_resp = cloud_shell.RemoveRoutesFromReservation(
            cs_res_id, list_of_endpoints, mapping_type)
        # A non-None response is treated as success.
        if xml_resp is not None:
            testcase_Utils.pNote(
                "\n\n *** Cloudshell Remove Route From"
                " Reservation successfull for \"{}\"\n".format(
                    reservation_name), "info")
            status = True
        else:
            testcase_Utils.pNote(
                "\n\n *** Cloudshell Remove Route From"
                " Reservation failed for \"{}\"".format(reservation_name),
                "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status
def update_list_key_in_data_repository(self, key, value, status="True"):
    """ This keyword will update an existing key in the data repository

    Appends `value` to the list already stored under `key` and hands the
    updated list back so the framework writes it to the repo.

    :param key: key name
    :param value: value to be appended
    :param status: kw will pass/fail accordingly ("false" -> False,
                   anything else -> True)
    :return: status (bool), updated_dict (dict)
    """
    wdesc = "This keyword will update an existing key in the data repository"
    pNote(wdesc)
    kw_status = status.lower() != "false"
    stored_list = data_Utils.get_object_from_datarepository(key)
    stored_list.append(value)
    pNote("Updating {0} value wih {1}".format(key, format(value)))
    return kw_status, {key: stored_list}
def verify_text(self, **kwargs):
    """stores the text from element in data repository with var variable
    and verifies if it is same as expected if expected is provided
    :Arguments:
        1. var = variable in which to store the text
        2. expected = value to compare with as a list separated by comma
    """
    actual = get_object_from_datarepository(kwargs.get('var'))
    allowed = kwargs.get('expected').split(',')
    if actual in allowed:
        return True
    print_error("element text expected to be <<{}>> "
                "but found to be <<{}>>".format(', '.join(allowed), actual))
    return False
def _get_obj_session(details_dict, obj_session, kw_system_name, index):
    """If system name is provided in testdata file get the session of
    that system name and use it or use the current obj_session

    :return: (session object or False, possibly-overridden kw_system_name,
              details_dict unchanged)
    """
    value = False
    kw_system_nameonly, _ = data_Utils.split_system_subsystem(kw_system_name)
    td_sys = details_dict["sys_list"][index]
    # To get the session name if it is provided as part of sys tag in td
    # (format "system.session").
    td_sys_split = td_sys.split('.') if isinstance(td_sys, str) else []
    if len(td_sys_split) == 2:
        td_sys = td_sys_split[0]
        session = td_sys_split[1]
    else:
        session = details_dict["session_list"][index]
    # Normalize: strip whitespace and map empty/None/False sentinels so the
    # truthiness checks below behave consistently.
    td_sys = td_sys.strip() if isinstance(td_sys, str) else td_sys
    td_sys = {None: False, False: False, "": False}.get(td_sys, td_sys)
    session = session.strip() if isinstance(session, str) else session
    session = {None: None, False: None, "": None}.get(session, session)
    if td_sys:
        # "[subsystem]" syntax appends to the keyword's base system name;
        # anything else is taken as a full system name.
        system_name = kw_system_nameonly + td_sys if td_sys.startswith("[") \
            and td_sys.endswith("]") else td_sys
        session_id = data_Utils.get_session_id(system_name, session)
        obj_session = data_Utils.get_object_from_datarepository(session_id)
        if not obj_session:
            pNote("Could not find a valid connection for "\
                  "system_name={}, session_name={}".format(system_name, session),
                  "error")
            value = False
        else:
            value = obj_session
            # details_dict =
            # _update_details_dict(system_name, datafile, details_dict, var_sub)
    else:
        # No system override in testdata: keep the caller's session.
        # print obj_session
        value = obj_session
        system_name = kw_system_name
    pNote("System name\t: {0}".format(system_name))
    if details_dict["sys_list"][index] is not None:
        kw_system_name = details_dict["sys_list"][index]
    return value, kw_system_name, details_dict
def math_decision(exec_condition, exec_cond_var, operator):
    """
    Handle the math operator decision
    :param:
        exec_condition: value from data_repo to be compared
        exec_cond_var: user provided value to be compared
        operator: math operator in plain English
    :return:
        True if operator condition match for repo value on left and
        user value on right, else False
    """
    op_name = operator.lower()
    if op_name in MATH_OPERATION:
        # Repo value on the left, user value on the right.
        repo_value = get_object_from_datarepository(exec_condition)
        return MATH_OPERATION[op_name](repo_value, exec_cond_var)
    pNote("Unknown error occur when deciding value, please check condition value of the step",
          "Error")
    return False
def log_message(self, message=None, type="INFO", list_message=None, dict_message=None):
    """Keyword to print the given message.
    :Arguments:
        1. type = message severity level
                  INFO,WARN,DEBUG,ERROR are supported values
        2. message = message to be printed,
        3. list_message = list of messages to be printed,
        4. dict_message = dict with key 'custom message from user' and
                          value 'name in data repo'
        one of the arguments message, list_message, dict_message is mandatory.
    :Returns:
        1. True (boolean), this keyword always returns True.
           Don't want to fail the test based on this keyword.
    """
    wdesc = "keyword to print the given log message"
    Utils.testcase_Utils.pNote(wdesc)
    if not (message or list_message or dict_message):
        print_error("Please specify atleast one message for printing")
        Utils.testcase_Utils.pNote(
            "Please specify atleast one message for printing")
        return True
    if not self.map_function.get(type):
        print_error("type : " + type + " is not supported")
        Utils.testcase_Utils.pNote("type : " + type + " is not supported")
        return True
    if message:
        self.map_function[type](message)
    if list_message:
        # Plain loop instead of a side-effect list comprehension.
        for msg in list_message:
            self.map_function[type](msg)
    if dict_message:
        # Bug fix: dict.iteritems() is Python 2 only and raises AttributeError
        # on Python 3 (the rest of this file uses Python 3 idioms).
        for msg, value in dict_message.items():
            value = get_object_from_datarepository(value)
            if value is not None:
                self.map_function[type](msg + ": " + value)
            else:
                self.map_function[type](msg)
    return True
def fetch_in_repo(self, datavar):
    """Prints value of datavar in datarepository
    :Argument:
        1. datavar = Key to fetch value in data_repository, this could be
           dot separated to fetch in nested fashion
           i.e., if var is k1.k2.k3 then the value would be fetched as
           a value in datarepository[k1][k2][k3]
    :Returns:
        status (boolean True)
    """
    wDesc = "Print the value of given key in data_repository "
    Utils.testcase_Utils.pNote(wDesc)
    fetched = get_object_from_datarepository(datavar)
    if fetched:
        print_info("Value: {0} is stored in a Key: {1} of Warrior "
                   "data_repository".format(fetched, datavar))
    # Always True; a missing key only produces a warning from the fetch above.
    return True
def compare_hybrid_tc_result(self, input_file):
    """ It takes the input file path which is the expected result and
    compares with the log file and returns True if both matches else False
    and prints the difference to console.
    Arguments:
        input_file: It takes expected result file path as input
    """
    wdesc = "Compares the test case result file with expected result file"
    pNote(wdesc)
    output_file = data_Utils.get_object_from_datarepository("output_file")
    # Fix: open both files via context managers so the handles are closed
    # (they were previously leaked).
    with open(output_file) as out_handle:
        output_content = out_handle.readlines()
    with open(input_file) as in_handle:
        input_content = in_handle.readlines()
    if output_content == input_content:
        return True
    # Wrap lines in 1-tuples so the diff entries keep the original line text.
    output_set = set([tuple([i]) for i in output_content])
    input_set = set([tuple([i]) for i in input_content])
    output_set_count = len(output_set)
    input_set_count = len(input_set)
    # Diff from the larger side; print context from that side's content.
    if output_set_count > input_set_count:
        diff = output_set.difference(input_set)
        result_content = output_content
    else:
        diff = input_set.difference(output_set)
        result_content = input_content
    pNote(
        "**************The difference between the files is******************"
    )
    for j in diff:
        s = str(j[0])
        index = result_content.index(s)
        # Print the block up to the next separator line for context.
        last_index = result_content.index("****************************\n",
                                          index)
        start_index = last_index - 2
        for i in range(start_index - 1, last_index + 1):
            pNote(result_content[i].strip("\n"))
    return False
def verify_delta(self, delta_key, int_num, float_min_val):
    """ test keyword created for runmode_timer

    Compare a list of delta to a minimum value. This is used to ensure
    runmode is correctly waiting for a minimum amount of time
    (float_min_val).

    :Argument:
        delta_key = key name for the list of delta
        int_num = number of delta required in list of delta
        float_min_val = minimum value of each delta
    """
    deltas = data_Utils.get_object_from_datarepository(delta_key)
    if not deltas:
        return False
    if len(deltas) != int_num:
        pNote("not enough delta value stored in list", "Error")
        return False
    result = all(d >= float_min_val for d in deltas)
    if not result:
        pNote("Delta: {} not meet minimum value {}". \
              format(str(deltas), float_min_val))
    return result
def write_live_results(self, junitObj, givenPath, is_final):
    """ build the html
    givenPath: added this feature in case of later down the line
    calling from outside junit file ( no actual use as of now )

    Accumulates the html of all pending line objects, optionally pushes it to
    the live-html location from the data repo, merges and writes the result
    file, then clears the pending line objects. Prints a summary banner on
    the final call.
    """
    if junitObj:
        self.junit_file = junitObj
        self.junit_root = xml_Utils.getRoot(self.junit_file)
    if givenPath:
        self.givenPath = givenPath
    self.set_line_objs()
    html = ''
    for item in self.lineObjs:
        html += item.html
    if is_final is True:
        #html += '<div class="complete"></div>'
        pass
    live_html_dict = data_Utils.get_object_from_datarepository(
        'live_html_dict', verbose=False)
    if live_html_dict:
        livehtmllocn = live_html_dict['livehtmllocn']
        live_html_iter = live_html_dict['iter']
        self.create_live_table(html, livehtmllocn, live_html_iter)
    html = self.merge_html(html)
    # Fix: write via a context manager so the handle is closed even if
    # write() raises (was open/write/close).
    with open(self.get_path(), 'w') as elem_file:
        elem_file.write(html)
    self.lineObjs = []
    if is_final is True:
        print_info("++++ Results Summary ++++")
        print_info(
            "Open the Results summary file given below in a browser to "
            "view results summary for this execution")
        print_info("Results sumary file: {0}".format(self.get_path()))
        print_info("+++++++++++++++++++++++++")
def verify_list_key_value_in_data_repo(self, key, expected_value):
    """ This keyword will verify that the list stored under an existing key
    in the data repository matches a comma-separated expected value.

    :param key: key name
    :param expected_value: comma separated expected items (whitespace around
        each item is stripped before comparison)
    :return: status (bool) - True only when lengths match and every element
        compares equal pairwise
    """
    wdesc = "This keyword will verify an existing key's value"
    pNote(wdesc)
    status = False
    data = data_Utils.get_object_from_datarepository(key)
    pNote("{1} Value (as stored in Data Repository): {0}".format(
        data, key))
    compare_value = [x.strip() for x in expected_value.split(",")]
    pNote("Expected Value: {0}".format(compare_value))
    if len(data) == len(compare_value):
        for sub_data, sub_compare in zip(data, compare_value):
            if sub_data != sub_compare:
                break
        # for/else: runs only when the loop completed without a break,
        # i.e. every pair matched.
        else:
            status = True
    if not status:
        pNote("Expected Value and Existing Value do not match", "error")
    return status
def store_in_repo(self, datavar=None, datavalue=None, type='str',
                  filepath=None, jsonkey="repo_variables"):
    """Stores datavalue in datavar of datarepository
    :Argument:
        1. datavar = Key to be used to store datavalue in data_repository,
           this could be dot separated to store in nested fashion
           i.e., if var is k1.k2.k3 then the data value would be
           stored as a value in datarepository[k1][k2][k3]
        2. datavalue = Value to be stored
        3. type = Type of datavalue(string/int/float)
        4. filepath = Json file where datarepository variables are defined.
           It is to store multiple key,value pairs in datarepository.
        5. jsonkey = The key where all the REPO variables & values are
           defined in the filepath
        Sample JSON file:
        {
            "repo_variables": {
                "var1": {"type": "int", "value": "10"},
                "var2.var3": {"value": "10"},
                "var4.var5": "1"
            },
            "user_defined_tag":{
                "var6" : {"type": "int", "value": "40"}
            }
        }
        All three formats in the above sample block are allowed. If 'type'
        is not provided, value will be converted as string by default.
    """
    def get_dict_to_update(var, val):
        """
        The function creates a dictionary with Variable and value.
        If Variable has "." separated keys then the value is updated at
        appropriate level of the nested dictionary.
        :param var: Dictionary Key or Key separated with "." for nested
                    dict keys.
        :param val: Value for the Key.
        :return: Dictionary
        """
        dic = {}
        if '.' in var:
            [key, value] = var.split('.', 1)
            dic[key] = get_dict_to_update(value, val)
        else:
            dic[var] = val
        return dic

    status = False
    pass_msg = "Value: {0} is stored in a Key: {1} of Warrior data_repository"
    if datavar is not None and datavalue is not None:
        if type == 'int':
            datavalue = int(datavalue)
        elif type == 'float':
            datavalue = float(datavalue)
        dict_to_update = get_dict_to_update(datavar, datavalue)
        update_datarepository(dict_to_update)
        print_info(pass_msg.format(datavalue, datavar))
        status = True
    if filepath is not None:
        testcasefile_path = get_object_from_datarepository(
            'wt_testcase_filepath')
        try:
            # Resolve the json file relative to the testcase file's directory.
            filepath = getAbsPath(filepath, os.path.dirname(testcasefile_path))
            with open(filepath, "r") as json_handle:
                json_doc = json.load(json_handle)
                if jsonkey in json_doc:
                    repo_dict = json_doc[jsonkey]
                    for var_key, var_value in list(repo_dict.items()):
                        if isinstance(var_value, dict):
                            if var_value.get('type') == 'int':
                                value = int(var_value['value'])
                            elif var_value.get('type') == 'float':
                                value = float(var_value['value'])
                            else:
                                value = str(var_value['value'])
                        else:
                            value = str(var_value)
                        dict_to_update = get_dict_to_update(var_key, value)
                        update_datarepository(dict_to_update)
                        print_info(pass_msg.format(value, var_key))
                else:
                    print_error(
                        'The {0} file is missing the key '
                        '\"repo_variables\", please refer to '
                        'the Samples in Config_files'.format(filepath))
                status = True
        except ValueError:
            print_error('The file {0} is not a valid json '
                        'file'.format(filepath))
        except IOError:
            print_error('The file {0} does not exist'.format(filepath))
        except Exception as error:
            print_error('Encountered {0} error'.format(error))
    # Bug fix: the guard previously tested `type is None`, but `type`
    # defaults to 'str' and is never None, so the usage error was not shown
    # when `datavar` was missing. The intended check is on `datavar`.
    if (datavar is None or datavalue is None) and filepath is None:
        print_error('Either Provide values to arguments \"datavar\" & '
                    '\"datavalue\" or to argument \"filepath\"')
    return status
def check_opt_values_from_datafile(
        self, langs=['Sanskrit', 'Tamil'], strvar="I am a default variable",
        states="wtag=states", system_name="sys_wtag",
        currencys={'USA': 'USD'}, ramspace=False,
        configfile="../../config_files/check_file_type", intvar=496):
    """Verify the datatype of the value read from the datafile using
    either the tag or wtag feature
    :Argument:
        1. system_name = system name in the datafile
        2. strvar = string variable
        3. langs = list variable (should get from data file using wtag)
        4. states = tuple variable
        5. currencys = dict variable
        6. ramspace = boolean variable
        7. configfile = file variable
        8. intvar = int variable
    :return: status (bool) - True when every argument has the expected type

    NOTE(review): `langs` and `currencys` are mutable default arguments;
    they are only read here, never mutated, so the shared-default pitfall
    does not bite, and they are kept because the defaults are part of the
    keyword's documented interface.
    """
    def check_type(var, varname, datatype):
        """check that vars are of correct datatype """
        vartype = type(var)
        status = True
        if vartype is not datatype:
            pNote(
                '{} is expected to be {} type, but found to be of '
                '{} type'.format(varname, datatype, vartype), "error")
            status = False
        return status

    status = True
    datafile = Utils.config_Utils.datafile
    tc_filepath = os.path.dirname(
        data_Utils.get_object_from_datarepository('wt_testcase_filepath'))
    # this block checks if strvar is string type
    status = check_type(strvar, "strvar", str) and status
    # this block checks if langs is list type
    status = check_type(langs, "langs", list) and status
    # this block checks if states is tuple type
    status = check_type(states, "states", tuple) and status
    # this block checks if currencys is dict type
    status = check_type(currencys, "currencys", dict) and status
    # this block checks if ramspace is bool type
    status = check_type(ramspace, "ramspace", bool) and status
    file_err = '{} is not a file, please check'
    try:
        # this checks if configfile and anotherfile are valid files
        # by getting the absolute path of the file
        if not os.path.isabs(configfile):
            configfile = file_Utils.getAbsPath(configfile, tc_filepath)
        if not os.path.isfile(configfile):
            pNote(file_err.format(configfile), "error")
    except AttributeError:
        # Raised when configfile is not path-like (e.g. not a string).
        pNote('configfile and anotherfile are expected to be files', "error")
        pNote('type of configfile is {}'.format(type(configfile)), "error")
        status = False
    # "tag..." values are resolved from the datafile before type checking.
    if type(intvar) is str and intvar.startswith('tag'):
        intvar = data_Utils.resolve_argument_value_to_get_tag_value(
            datafile, system_name, intvar)
    else:
        status = check_type(intvar, "intvar", int) and status
    return status
def cs_add_topology_to_reservation(self, system_name, reservation_name,
                                   topology_full_path):
    """ Create the reservation and add topology to the reservation in
    Cloudshell
    :Datafile usage: NA
    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. topology_full_path(string) = Specify the full topology name.
           Include the full path from the root to the topology,
           separated by slashes.
           For example: FolderName/Topologies/TopologyName
    :Returns:
        1. status(bool)= True/False
    """
    wdesc = "Create the reservation and add Topology to Reservation in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    status = True
    # Reservation parameters were stored under this key by
    # cs_create_reservation; without them this keyword fails.
    res_key = "{0}_{1}_cs_rsrv_details".format(system_name, reservation_name)
    res_details = data_Utils.get_object_from_datarepository(res_key)
    output_dict = {}
    if res_details:
        username = res_details["username"]
        reservation_name = res_details["reservation_name"]
        duration = int(res_details["duration"])
        notify_on_start = res_details["notify_on_start"]
        notify_on_end = res_details["notify_on_end"]
        notify_mins_before_end = int(res_details["notify_mins_before_end"])
        try:
            xml_resp = cloud_shell.CreateImmediateTopologyReservation(
                reservation_name, username, duration, notify_on_start,
                notify_on_end, notify_mins_before_end, topology_full_path)
            if xml_resp is not None:
                reservation_id = xml_resp.Reservation.Id
                # Publish the new reservation id (and domain) so that
                # follow-up keywords can find the reservation.
                output_dict = {
                    'domain_id': cloud_shell.domain,
                    '{0}_{1}_reservationId'.format(system_name,
                                                   reservation_name):
                    reservation_id
                }
                testcase_Utils.pNote("\n\n *** Cloudshell CreateReservation"
                                     " successful for ResName-\"{}\" ResId-{}\n".\
                                     format(reservation_name,
                                            output_dict['{0}_{1}_reservationId'.\
                                            format(system_name, reservation_name)]),
                                     "info")
                testcase_Utils.pNote("\n\n *** Cloudshell Add Topology \"{}\""
                                     " successful for \"{}\"\n".\
                                     format(topology_full_path,
                                            reservation_name), "info")
            else:
                testcase_Utils.pNote("\n\n *** Cloudshell CreateReservation"
                                     " failed for \"{}\"".format(reservation_name),
                                     "warning")
                testcase_Utils.pNote("\n\n *** Cloudshell Add Topology \"{}\""
                                     " failed for \"{}\"".\
                                     format(topology_full_path,
                                            reservation_name), "warning")
                status = False
        except Exception as exception:
            print_exception(exception)
            status = False
    else:
        pNote("Details for reservation_name={0}, for sysem={1} "\
              "not found in data repository. Please make sure "\
              "to execute the keyword '{2}' before this"\
              "step".format(reservation_name, system_name,
                            'cs_create_reservation'), "warning")
        status = False
    testcase_Utils.report_substep_status(status)
    return status, output_dict
def _make_ff(self, webdriver_remote_url, desired_capabilites, profile_dir, **kwargs):
    """Create an instance of firefox browser.

    Resolves the firefox profile (with optional proxy), the geckodriver log
    location, the firefox binary and capabilities, then launches either a
    remote or a local Firefox webdriver. Returns the browser instance, or
    None if creation failed.
    """
    ff_binary_path = kwargs.get("binary", None)
    gecko_path = kwargs.get("gecko_path", None)
    # gecko_log is the absolute path to save geckodriver log
    gecko_log = kwargs.get("gecko_log", None)
    proxy_ip = kwargs.get("proxy_ip", None)
    proxy_port = kwargs.get("proxy_port", None)

    # if firefox is being used with proxy, set the profile here
    # if firefox_proxy details are not given, set profile_dir
    # as the ff_profile.
    if proxy_ip is not None and proxy_port is not None:
        ff_profile = self.set_firefox_proxy(profile_dir, proxy_ip, proxy_port)
    else:
        ff_profile = profile_dir

    # Default the gecko log to the Warrior logs directory unless overridden
    if gecko_log in [None, False]:
        log_dir = get_object_from_datarepository("wt_logsdir")
    else:
        log_dir = gecko_log
    log_dir = os.path.join(
        log_dir, "gecko_" + kwargs.get("browser_name", "default") + ".log")

    browser = None
    try:
        if webdriver_remote_url:
            browser = self._create_remote_web_driver(
                webdriver.DesiredCapabilities.FIREFOX,
                webdriver_remote_url, desired_capabilites, ff_profile)
        else:
            optional_args = {}
            ff_capabilities = webdriver.DesiredCapabilities.FIREFOX
            # This is for internal testing needs...some https cert is not secure
            # And firefox will need to know how to handle it
            ff_capabilities['acceptInsecureCerts'] = True
            if ff_binary_path not in [False, None]:
                if not fileExists(ff_binary_path):
                    print_warning("Given firefox binary '{}' does not exist, default "
                                  "firefox will be used for execution.".format(ff_binary_path))
                    ff_binary_path = None
            else:
                print_info("No value given for firefox binary, default "
                           "firefox will be used for execution.")
            # Force disable marionette, only needs in Selenium 3 with FF ver < 47
            # Without these lines, selenium may encounter capability not found issue
            # https://github.com/seleniumhq/selenium/issues/2739
            # https://github.com/SeleniumHQ/selenium/issues/5106#issuecomment-347298110
            if self.get_firefox_version(ff_binary_path) < LooseVersion("47.0.0"):
                ff_capabilities["marionette"] = False
            else:
                # gecko_log will only get generate if there is failure/error
                # Need to specify log_path for geckodriver log
                # Gecko driver will only launch if FF version is 47 or above
                optional_args["log_path"] = log_dir
            ffbinary = FirefoxBinary(ff_binary_path) if ff_binary_path is not None else None
            if gecko_path is not None:
                optional_args["executable_path"] = gecko_path
            browser = webdriver.Firefox(firefox_binary=ffbinary,
                                        capabilities=ff_capabilities,
                                        firefox_profile=ff_profile,
                                        **optional_args)
    except WebDriverException as err:
        if "executable needs to be in PATH" in str(err):
            print_error("Please provide path for geckodriver executable")
        elif "Expected browser binary location" in str(err):
            print_error("Please provide path of firefox executable")
        print_error(err)
        traceback.print_exc()
    except Exception as err:
        print_error(err)
        traceback.print_exc()

    # Hint at the most common failure mode: new Firefox with old Selenium/geckodriver
    if browser is None and\
     any((LooseVersion(webdriver.__version__) < LooseVersion("3.5.0"),
          gecko_path is None)):
        print_info("Unable to create Firefox browser, one possible reason is because"\
                   "Firefox version >= 47.0.1 and Selenium version < 3.5"\
                   "In order to launch Firefox ver 47 and up, Selenium needs to be updated to >= 3.5"\
                   "and needs geckodriver > 0.16")
    return browser
def store_in_repo(self, datavar=None, datavalue=None, type='str', filepath=None,
                  jsonkey="repo_variables", bool_store_all=False):
    """Stores datavalue in datavar of datarepository
    :Argument:
        1. datavar = Key to be used to store datavalue in data_repository,
                     this could be dot separated to store in nested fashion
            i.e., if var is k1.k2.k3 then the data value would be stored as
            a value in datarepository[k1][k2][k3]
        2. datavalue = Value to be stored
        3. type = Type of datavalue(string/int/float)
        4. filepath = Json file where datarepository variables are defined.
                      It is to store multiple key,value pairs in datarepository.
        5. jsonkey = The key where all the REPO variables & values are
                     defined in the filepath
        6. bool_store_all = Set to True to store whole json file content to data
            repository. keys from the json file will be used as it is to store
            in repo if this value is set to True. default value is set to False.
        Sample JSON file:
        {
        "repo_variables": {
            "var1": {"type": "int", "value": "10"},
            "var2.var3": {"value": "10"},
            "var4.var5": "1"
            },
        "user_defined_tag":{
            "var6" : {"type": "int", "value": "40"}
            }
        }
        All three formats in the above sample block are allowed. If 'type' is
        not provided, value will be converted as string by default.
    :Return:
        status(bool) = True if at least one value was stored successfully.
    """
    status = False
    pass_msg = "Value: {0} is stored in a Key: {1} of Warrior data_repository"
    if datavar is not None and datavalue is not None:
        # Convert the single value per the requested type before storing
        if type == 'int':
            datavalue = int(datavalue)
        elif type == 'float':
            datavalue = float(datavalue)
        dict_to_update = Utils.dict_Utils.get_dict_to_update(datavar, datavalue)
        update_datarepository(dict_to_update)
        print_info(pass_msg.format(datavalue, datavar))
        status = True
    if filepath is not None:
        # filepath is resolved relative to the running testcase file
        testcasefile_path = get_object_from_datarepository('wt_testcase_filepath')
        try:
            filepath = getAbsPath(filepath, os.path.dirname(testcasefile_path))
            with open(filepath, "r") as json_handle:
                json_doc = json.load(json_handle)
                # if bool_store_all is set to True, all content of given json file
                # will be stored in data repository
                if isinstance(bool_store_all, bool) and bool_store_all is True:
                    print_info("bool_store_all is set to True, all content of given"
                               " json file will be stored in data repository")
                    update_datarepository(json_doc)
                    print_info("{0} dictionary stored in Warrior data_repository".
                               format(json_doc))
                    status = True
                elif not isinstance(bool_store_all, bool):
                    print_error("invalid value : {0} given for bool_store_all,"
                                "valid value: boolean True or False".format(bool_store_all))
                    status = False
                elif jsonkey in json_doc:
                    dict_to_update = {}
                    repo_dict = json_doc[jsonkey]
                    for var_key, var_value in repo_dict.items():
                        if isinstance(var_value, dict):
                            # {"type": ..., "value": ...} form; default type is str
                            if var_value.get('type') == 'int':
                                value = int(var_value['value'])
                            elif var_value.get('type') == 'float':
                                value = float(var_value['value'])
                            else:
                                value = str(var_value['value'])
                        else:
                            # bare "key": "value" form, always stored as string
                            value = str(var_value)
                        build_dict = Utils.dict_Utils.get_dict_to_update(var_key, value)
                        Utils.dict_Utils.verify_key_already_exists_and_update\
                            (orig_dict=dict_to_update, new_dict=build_dict)
                    update_datarepository(dict_to_update)
                    print_info("{0} dictionary stored in Warrior data_repository".
                               format(dict_to_update))
                    status = True
                else:
                    print_error('The {0} file is missing the key '
                                '\"repo_variables\", please refer to '
                                'the Samples in Config_files'.format(filepath))
                    # BUGFIX: missing jsonkey is an error - report failure
                    # (was incorrectly setting status = True)
                    status = False
        except ValueError:
            print_error('The file {0} is not a valid json '
                        'file'.format(filepath))
        except IOError:
            print_error('The file {0} does not exist'.format(filepath))
        except Exception as error:
            print_error('Encountered {0} error'.format(error))
    # BUGFIX: validate 'datavar' (not 'type', which defaults to 'str' and is
    # never None) so the usage error actually fires when arguments are missing
    if (datavar is None or datavalue is None) and filepath is None:
        print_error('Either Provide values to arguments \"datavar\" & '
                    '\"datavalue\" or to argument \"filepath\"')
    return status
def set_env_var(self, var_key=None, var_value=None, filepath=None,
                jsonkey="environmental_variables", overwrite="yes"):
    """Create a temp environment variable, the value will only stay for the
    current Execution
    :Argument:
        var_key = key of the environment variable
        var_value = value of the environment variable
        filepath = Json file where Environmental variables are defined
        jsonkey = The key where all the ENV variable & values are defined
            With jsonkey arg, Users can call same file to set various
            ENV Variable
        overwrite = Yes-Will overwrite ENV variables set earlier via terminal
                        or other means
                    No -Will not overwrite the ENV variables set earlier with
                        the ones passed through this keyword.
    Variable File :
        Sample environmental_variable file is available under
        Warriorspace/Config_file/Samples/Set_ENV_Variable_Sample.json
    :Return:
        status(bool) = True if the variable(s) were set successfully.
    """
    overwrite = overwrite.upper()
    status = False
    if not any([var_key, var_value, filepath]):
        print_error(
            'Either Provide values to arguments \"var_key\" & \"var_value\" or to '
            'argument \"filepath\"')
    if overwrite == "NO" and os.getenv(var_key):
        # Respect a pre-existing value when overwrite is disabled
        print_info("Using ENV variable {0} set earlier with "
                   "value '{1}'".format(var_key, os.getenv(var_key)))
    elif var_key is not None and var_value is not None and overwrite in [
            "YES", "NO"
    ]:
        os.environ[var_key] = var_value
        if os.environ[var_key] == var_value:
            print_info("Set ENV variable {0} with value '{1}'".format(
                var_key, var_value))
            status = True
    else:
        print_error(
            'The attribute overwrite can only accept values either yes or no'
        )
    if filepath is not None:
        # filepath is resolved relative to the running testcase file
        testcasefile_path = get_object_from_datarepository(
            'wt_testcase_filepath')
        try:
            filepath = getAbsPath(filepath,
                                  os.path.dirname(testcasefile_path))
            with open(filepath, "r") as json_handle:
                get_json = json.load(json_handle)
                if jsonkey in get_json:
                    env_dict = get_json[jsonkey]
                    for var_key, var_value in env_dict.items():
                        if overwrite == "NO" and os.getenv(var_key):
                            print_info(
                                'Using ENV variable {0} set earlier with value '
                                '{1}'.format(var_key, os.getenv(var_key)))
                            status = True
                        elif overwrite in ["YES", "NO"]:
                            os.environ[var_key] = str(var_value)
                            # BUGFIX: compare against the stringified value that
                            # was actually stored; JSON values may be int/float
                            # and os.environ only holds strings, so comparing
                            # with the raw value silently failed for them
                            if os.environ[var_key] == str(var_value):
                                print_info(
                                    'Setting ENV variable {0} with value '
                                    '{1}'.format(var_key, var_value))
                                status = True
                        else:
                            print_error(
                                'The attribute overwrite can only accept values either '
                                'yes or no')
                else:
                    print_error(
                        'The {0} file is missing the key '
                        '\"environmental_variables\", please refer to '
                        'the Samples in Config_files'.format(filepath))
                    status = False
        except ValueError:
            print_error('The file {0} is not a valid json '
                        'file'.format(filepath))
            status = False
        except IOError:
            print_error('The file {0} does not exist'.format(filepath))
            status = False
        except Exception as error:
            print_error('Encountered {0} error'.format(error))
            status = False
    return status
def _resolve_session_id(name):
    """Resolve 'system' or 'system.subsystem' into a data-repo session id."""
    parts = name.split(".")
    if len(parts) > 1:
        return data_Utils.get_session_id(parts[0], parts[1])
    return data_Utils.get_session_id(parts[0])


def start_threads(started_thread_for_system, thread_instance_list, same_system,
                  unique_log_verify_list, system_name):
    """
    This function iterates over unique_log_verify_list which consists of
    unique values gotten from monitor attributes and verify_on attributes

    If a system_name has a * against it, it indicates that the system is
    the same as the one on which the testcase is running. Thread would not
    be started for that system.

    :Returns:
        started_thread_for_system (list[str]) = Stores the system names for
            which threads were succesfully created
        thread_instance_list (list[str]) = stores the instances of thread
            created for corresponding system in the started_thread_for_system
            list,
        same_system (list[str]) = stores the system name which was the same
            as the system on which the TC is running without the trailing *,
    """
    # The incoming accumulator arguments are intentionally reset; the caller
    # receives fresh lists via the return value.
    started_thread_for_system = []
    thread_instance_list = []
    same_system = []
    for i, entry in enumerate(unique_log_verify_list):
        if entry == system_name:
            # Same system as the TC itself: record it, don't start a thread
            unique_log_verify_list[i] = _resolve_session_id(entry)
            same_system.append(unique_log_verify_list[i])
        elif entry:
            unique_log_verify_list[i] = _resolve_session_id(entry)
            datarep_obj = get_object_from_datarepository(
                unique_log_verify_list[i])
            if datarep_obj is False:
                print_info("{0} does not exist in data repository".format(
                    unique_log_verify_list[i]))
            else:
                try:
                    new_thread = ThreadedLog()
                    new_thread.start_thread(datarep_obj)
                    print_info("Collecting response from: {0}".format(
                        unique_log_verify_list[i]))
                    started_thread_for_system.append(
                        unique_log_verify_list[i])
                    thread_instance_list.append(new_thread)
                # BUGFIX: narrowed from a bare 'except:' which would also
                # swallow KeyboardInterrupt/SystemExit
                except Exception:
                    print_info(
                        "Unable to collect response from: {0}".format(
                            unique_log_verify_list[i]))
    return started_thread_for_system, thread_instance_list, same_system