def checkdir_create_file(self, inpdir, dirname, colocate=False):
    """Check if dir is present; if dir is present, create subdir and files.
    If dir is not present, try to create dir, subdir and files.
    If not able to create dir, use the Warrior framework's default dir structure."""
    dir_status = file_Utils.check_and_create_dir(inpdir)  # creates tc_results dir
    if dir_status:
        try:
            if colocate:
                execdir = self.results_execdir
            else:
                execdir = file_Utils.createDir_addtimestamp(inpdir, self.nameonly)
            rfile = self.get_exec_file_by_type(dirname, execdir)
        except OSError:
            dir_status = False
        except Exception as exception:
            print_exception(exception)
            dir_status = False
    if dir_status is False:
        print_warning("Creating directory/file(s) in provided path {0} failed. "
                      "\n Hence Warrior Framework's default directory structure will be used "
                      "for this execution.".format(inpdir))
        execdir = self.create_def_exec_dir()  # proj_exec_dir
        rfile = self.get_exec_file_by_type(dirname, execdir)
    return rfile, execdir

def diff_json_files(self, json_file1, json_file2):
    """Takes two json files as inputs and calculates the difference between them.

    :Note: This method does an unsorted (or raw) comparison of the input json files.
           For a sorted comparison use compare_json_files.

    :Returns: Returns a status and a comparison result (tuple or None)
        1. No difference between the two json objects:
           - status = True
           - comparison result = None
        2. Difference found between the two json objects:
           - status = False
           - comparison result = a tuple of two lists
             list1 = items in json1 but not in json2
             list2 = items in json2 but not in json1
        3. If any exception is encountered during comparison:
           - status = False
           - comparison result = None
    """
    try:
        json_object1 = json.load(open(json_file1, 'r'))
        json_object2 = json.load(open(json_file2, 'r'))
        result, result_list = self.diff_json_objects(json_object1, json_object2)
    except Exception as exception:
        print_exception(exception)
        result = False
        result_list = None
    return result, result_list

def get_key(encoded_key): """ Function that returns enc instance using secret key, passed to this function or read from secret.key file Args: encoded_key - False or base64 secrety key for encryption Return: IV - Random seed used to enc CIPHER - Enc instance used to for encryption """ IV = None CIPHER = None if encoded_key is False: try: MYFILE = Tools.__path__[0]+os.sep+"admin"+os.sep+'secret.key' with open(MYFILE, 'r') as myfileHandle: encoded_key = myfileHandle.read() except IOError: print_error("Could not find the secret.key file in Tools/Admin!") try: IV = Random.new().read(AES.block_size) CIPHER = AES.new(base64.b64decode(encoded_key), AES.MODE_CFB, IV) except Exception as e: print_exception("Some problem occured: {0}".format(e)) return IV, CIPHER
def compare_json_objects(self, json_object1, json_object2, case_conversion=False,
                         write_diff_to_console=True, check_for_subset=False):
    """Compares two json objects and returns True or False.
    This method recursively sorts all the lists and dictionaries in the
    json objects and then performs the comparison.
    If the user sets check_for_subset to True, it checks whether
    json_object2 is a subset of json_object1 by recursively sorting
    the json objects first.
    """
    print_info("compare two json objects")
    result = False
    try:
        if case_conversion:
            json_object1 = self.case_conversion_json(json_object1)
            json_object2 = self.case_conversion_json(json_object2)
        if check_for_subset:
            json_object1 = self.sort_json_object(json_object1)
            json_object2 = self.sort_json_object(json_object2)
            result = all(item in json_object1.items()
                         for item in json_object2.items())
        else:
            result = self.sort_json_object(json_object1) == self.sort_json_object(json_object2)
        if not result and write_diff_to_console:
            self.diff_json_objects(json_object1, json_object2)
    except Exception as exception:
        print_exception(exception)
    return result

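# Illustrative sketch of the idea behind sort_json_object (not the framework's
# implementation): recursively normalizing dicts and lists is what makes the
# comparison order-insensitive. Stdlib only, with made-up data.
import json

def _sorted_json(obj):
    """Recursively sort lists and dict items so ordering differences vanish."""
    if isinstance(obj, dict):
        return sorted((k, _sorted_json(v)) for k, v in obj.items())
    if isinstance(obj, list):
        return sorted(_sorted_json(x) for x in obj)
    return obj

json_a = json.loads('{"ports": [2, 1], "name": "sw1"}')
json_b = json.loads('{"name": "sw1", "ports": [1, 2]}')
assert _sorted_json(json_a) == _sorted_json(json_b)   # equal despite ordering
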
def cs_get_topo_details(self, topology_path):
    """To get the Cloudshell topology details for a given path.

    :Datafile usage: NA

    :Arguments:
        1. topology_path(string) = Specify the full topology name.
           Include the full path from the root to the topology, separated
           by slashes. For example: FolderName/Topologies/TopologyName

    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "To get the Cloudshell topology details for a given path"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    status = False
    testcase_Utils.pNote("cs_get_topo_details, cs obj-{}".format(cloud_shell), "info")
    try:
        xml_resp = cloud_shell.GetTopologyDetails(topology_path)
        if xml_resp is not None:
            testcase_Utils.pNote("\n\n *** Get Topology \"%s\" successful\n"
                                 % (topology_path), "info")
            status = True
        else:
            testcase_Utils.pNote("\n\n *** Get Topology \"%s\" failed\n"
                                 % (topology_path), "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status

def convert_csv_to_list_of_dict(input_csv_file):
    """Takes a CSV file path as input and converts it to a list of dictionaries.
    Arguments:
        input_csv_file: the CSV file path (or an open file object)
    Returns:
        Returns a list of dictionaries where keys are column names and
        values are the respective column values.
    """
    list_of_dict = []
    try:
        if isinstance(input_csv_file, str):
            input_csv_file = open(input_csv_file, 'r')
        else:
            input_csv_file.seek(0)
        reader = csv.DictReader(input_csv_file)
        title = reader.fieldnames
        for row in reader:
            ordered_dict = OrderedDict()
            for i in range(len(title)):
                if not title[i]:
                    continue
                ordered_dict[title[i]] = row[title[i]]
            list_of_dict.append(ordered_dict)
        input_csv_file.close()
    except Exception as exception:
        print_exception(exception)
    return list_of_dict

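# Minimal usage sketch with an in-memory CSV (stdlib only); the data below is
# made up purely to show the DictReader/OrderedDict pattern used above.
import csv
import io
from collections import OrderedDict

sample = io.StringIO("name,ip\nrouter1,10.0.0.1\nrouter2,10.0.0.2\n")
reader = csv.DictReader(sample)
rows = [OrderedDict((k, v) for k, v in row.items() if k) for row in reader]
# rows == [OrderedDict([('name', 'router1'), ('ip', '10.0.0.1')]),
#          OrderedDict([('name', 'router2'), ('ip', '10.0.0.2')])]
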
def main(testcase_filepath, data_repository={}, tc_context='POSITIVE',
         runtype='SEQUENTIAL_KEYWORDS', tc_parallel=False, auto_defects=False,
         suite=None, tc_onError_action=None, iter_ts_sys=None, queue=None,
         jiraproj=None):
    """ Executes a testcase """
    tc_start_time = Utils.datetime_utils.get_current_timestamp()
    if Utils.file_Utils.fileExists(testcase_filepath):
        try:
            tc_status, data_repository = execute_testcase(
                testcase_filepath, data_repository, tc_context, runtype,
                tc_parallel, queue, auto_defects, suite, jiraproj,
                tc_onError_action, iter_ts_sys)
        except Exception as exception:
            print_exception(exception)
            tc_status = False
    else:
        print_error("Testcase xml file does not exist in provided path: "
                    "{0}".format(testcase_filepath))
        tc_status = False
        if tc_parallel:
            queue.put(('ERROR', str(testcase_filepath), 'IMPACT', '0'))
    tc_duration = Utils.datetime_utils.get_time_delta(tc_start_time)
    return tc_status, tc_duration, data_repository

def collect_log(self, session):
    """Collects the response from a connected session until the thread is stopped.
    This function currently collects the response from a connected pexpect
    spawn object using the pexpect read_nonblocking method.
    """
    response = " "
    while not self.stop_thread_flag:
        try:
            # default timeout for a pexpect spawn object is 30s
            string = session.read_nonblocking(1024, timeout=30)
            if isinstance(string, bytes):
                string = string.decode("utf-8")
            response = response + string
            time.sleep(0.5)
            self.data = response
        # continue reading data from 'session' until the thread is stopped
        except pexpect.TIMEOUT:
            continue
        except Exception as exception:
            print_exception(exception)
            break

def get_action_dirlist(driverpath):
    """ Get the list of action directories """
    actions_package_list = []
    try:
        if os.path.isfile(driverpath):
            lines = []
            with open(driverpath, 'r') as fobj:
                lines = fobj.readlines()
            lines_as_string = ''.join(lines)
            search_string = re.compile(r'package_list.*=.*\]',
                                       re.DOTALL | re.MULTILINE)
            match = re.search(search_string, lines_as_string)
            if match:
                match_string = match.group()
                actions_package_list = match_string.split('[')[1].split(']')[0].split(',')
                return actions_package_list
        else:
            print("file {0} does not exist".format(driverpath))
            return actions_package_list
    except Exception as exception:
        print_exception(exception)
    return actions_package_list

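# Quick illustration of the regex extraction above on a made-up driver file
# body (stdlib only); the package names are hypothetical.
import re

driver_text = 'something = 1\npackage_list = ["Actions.CliActions",\n    "Actions.DemoActions"]\n'
match = re.search(re.compile(r'package_list.*=.*\]', re.DOTALL | re.MULTILINE), driver_text)
if match:
    packages = match.group().split('[')[1].split(']')[0].split(',')
    # packages -> ['"Actions.CliActions"', '\n    "Actions.DemoActions"']
    packages = [p.strip().strip('"\'') for p in packages]   # typical cleanup step
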
def connect_ssh(self):
    """ Initiates SSH connection to target system using paramiko module.
    For nested SSH connections, the session will be established via an
    intermediate system.
    """
    if self.paramiko is None:
        print_error("Paramiko is not installed, please install it")
        return
    self.port = self.port if self.port else "22"
    try:
        # for a nested SSH session
        if self.conn_type and self.conn_type.upper() == "SSH_NESTED":
            pNote("Nested SSH connection is requested, first connecting to"
                  " intermediate system - {}".format(self.via_ip))
            self.via_host = self.paramiko.SSHClient()
            self.via_host.set_missing_host_key_policy(self.paramiko.AutoAddPolicy())
            self.via_host.connect(self.via_ip, port=self.via_port,
                                  username=self.via_username,
                                  password=self.via_password,
                                  timeout=self.via_timeout)
            self.via_transport = self.via_host.get_transport()
            dest_addr = (self.ip, self.port)
            local_addr = ('127.0.0.1', 22)
            self.via_channel = self.via_transport.open_channel(
                "direct-tcpip", dest_addr, local_addr)
            pNote("Connection to intermediate system - {} "
                  "is successful".format(self.via_ip))
        else:
            self.via_channel = None
        self.target_host = self.paramiko.SSHClient()
        self.target_host.set_missing_host_key_policy(self.paramiko.AutoAddPolicy())
        self.target_host.connect(self.ip, port=self.port,
                                 username=self.username,
                                 password=self.password,
                                 timeout=self.timeout,
                                 sock=self.via_channel)
        if self.logfile is not None:
            # paramiko logging level is DEBUG (default)
            self.paramiko.util.log_to_file(self.logfile)
        # self.conn_string = self.target_host.get_transport().get_banner()
        # Use the invoke_shell option to get the conn_string value
        self.channel = self.target_host.invoke_shell()
        self.conn_string = self.channel.recv(9999).decode("utf-8")
    except Exception as exception:
        self.target_host = None
        print_exception(exception)

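# For reference, a bare paramiko jump-host ("direct-tcpip") sketch outside the
# class above. Hostnames, ports and credentials are placeholders, and it
# assumes paramiko is installed and both hosts are reachable.
import paramiko

jump = paramiko.SSHClient()
jump.set_missing_host_key_policy(paramiko.AutoAddPolicy())
jump.connect("jump.example.com", port=22, username="user", password="secret")

# Open a tunnelled channel from the jump host to the final target
channel = jump.get_transport().open_channel(
    "direct-tcpip", ("target.example.com", 22), ("127.0.0.1", 0))

target = paramiko.SSHClient()
target.set_missing_host_key_policy(paramiko.AutoAddPolicy())
target.connect("target.example.com", port=22, username="user",
               password="secret", sock=channel)

shell = target.invoke_shell()             # interactive shell, as done above
banner = shell.recv(9999).decode("utf-8")
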
def cs_create_route_in_reservation(self, system_name, reservation_name,
                                   source_resource_full_path,
                                   target_resource_full_path,
                                   override_active_routes, mapping_type,
                                   max_hops, route_alias, is_shared):
    """Creates routes between the specified source and target resources.

    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. source_resource_full_path(string) = Specify the source resource full path
        4. target_resource_full_path(string) = Specify the target resource full path
        5. override_active_routes(bool) = Specify whether the new route can
           override existing routes.
        6. mapping_type(string) = Specify bi-directional or uni-directional
           as the mapping type
        7. max_hops(integer) = The maximum number of allowed hops.
        8. route_alias(string) = Specify the route alias
        9. is_shared(bool) = Specify whether these routes are shared.

    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Create Route In Reservation in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote("cs_create_route_in_reservation, cs obj-{}".format(cloud_shell), "info")
    status = False
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    try:
        xml_resp = cloud_shell.CreateRouteInReservation(
            cs_res_id, source_resource_full_path, target_resource_full_path,
            override_active_routes, mapping_type, int(max_hops),
            route_alias, is_shared)
        if xml_resp is not None:
            testcase_Utils.pNote("\n\n *** Cloudshell Create Route In"
                                 " Reservation successful for \"{}\"\n".format(reservation_name),
                                 "info")
            status = True
        else:
            testcase_Utils.pNote("\n\n *** Cloudshell Create Route In"
                                 " Reservation failed for \"{}\"".format(reservation_name),
                                 "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status

def convert_xml_to_csv(input_file, mapping_file=None,
                       output_csv_file_path=None, overwrite="yes"):
    """Takes an xml file path as input and converts it to csv.
    Arguments:
        1. input_file: the xml file path
        2. mapping_file: if a mapping file path is given, it is used to map
           columns to meaningful names as recognized by the user; otherwise
           the tags in the xml file are used as column names in the csv file.
        3. output_csv_file_path: if given, the csv file is created at that
           path; otherwise the csv file is created next to the input xml file.
    Returns:
        Returns the output csv file path.
    """
    count = 0
    try:
        dict_response = xml_Utils.convert_xml_to_list_of_dict(input_file)
        if mapping_file:
            mapping_dict = data_Utils.get_credentials(mapping_file, 'mapping_scheme')
            mapping_dictionary = {v: k for k, v in mapping_dict.items()}
        else:
            mapping_dictionary = {}
        if output_csv_file_path:
            output_csv_file = output_csv_file_path
        else:
            output_csv_file = input_file.replace(".xml", ".csv")
        if overwrite == "no":
            output_csv_file = file_Utils.addTimeDate(output_csv_file)
        f = open(output_csv_file, 'wb+')
        csvwriter = csv.writer(f)
        for element in dict_response:
            if count == 0:
                header = list(element.keys())
                for index, val in enumerate(header):
                    for key, value in mapping_dictionary.items():
                        if val == value:
                            header[index] = key
                csvwriter.writerow(header)
                count += 1
            csvwriter.writerow(list(element.values()))
        f.close()
    except Exception as exception:
        print_exception(exception)
        output_csv_file = None
    return output_csv_file

def rem_nonprintable_ctrl_chars(self, txt):
    """Remove non-printable ascii control characters """
    # Removes the ascii escape chars
    try:
        txt = re.sub(r'[^\x20-\x7E|\x09-\x0A]', '', txt)
        # remove non-ascii characters
        # txt = repr(txt)[1:-1]
    except Exception as exception:
        print_exception(exception)
    return txt

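# Small demonstration of the character class above (stdlib only): everything
# outside printable ASCII, tab and newline is dropped, including ANSI escapes.
import re

raw = "\x1b[31mERROR\x1b[0m:\tdisk full\r\nretry\x07"
clean = re.sub(r'[^\x20-\x7E|\x09-\x0A]', '', raw)
# clean == "[31mERROR[0m:\tdisk full\nretry"   (ESC, CR and BEL removed)
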
def convert_excel_to_csv(input_excel_file, output_csv_file_path=None,
                         return_csv_file=False):
    """Takes an excel file path as input and converts it into a csv file.
    Arguments:
        1. input_excel_file: the excel file path to be converted into a csv file
        2. output_csv_file_path: if given, the csv file is created at that path;
           otherwise the csv file is created in the directory of the input
           excel file.
        3. return_csv_file: if True, the output csv file path is returned;
           otherwise the open csv file object is returned.
    Returns:
        Returns the csv file path if return_csv_file is True, else the file object.
    """
    try:
        if output_csv_file_path is None:
            if ".xlsx" in input_excel_file:
                ret_csv_file = input_excel_file.replace(".xlsx", ".csv")
            else:
                ret_csv_file = input_excel_file.replace(".xls", ".csv")
        else:
            ret_csv_file = output_csv_file_path
        wb = xlrd.open_workbook(input_excel_file)
        sh = wb.sheet_by_index(0)
        csv_file = open(ret_csv_file, 'wb+')
        wr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
        for rownum in range(sh.nrows):
            row_val = sh.row_values(rownum)
            for index, value in enumerate(row_val):
                # ctype 3 marks a date cell in xlrd
                if sh.cell(rownum, index).ctype == 3:
                    year, month, day, hour, minute, sec = xlrd.xldate_as_tuple(
                        value, wb.datemode)
                    date_format = "%02d/%02d/%04d" % (month, day, year)
                    row_val[index] = date_format
            wr.writerow(row_val)
        if return_csv_file:
            csv_file.close()
            csv_file = ret_csv_file
    except Exception as exception:
        print_exception(exception)
        csv_file = None
    return csv_file

def close_window(self, browser_instance=None):
    """Closes the current window."""
    status = True
    try:
        if browser_instance is not None:
            browser_instance.close()
        else:
            self.current_browser.close()
    except Exception as exception:
        print_exception(exception)
        status = False
    return status

def close_browser(self, browser_instance=None):
    """Closes a browser session."""
    status = True
    try:
        if browser_instance is not None:
            browser_instance.quit()
        else:
            self.current_browser.quit()
    except Exception as exception:
        print_exception(exception)
        status = False
    return status

def maximize_browser_window(self, browser_instance=None):
    """Maximizes the current browser window."""
    status = True
    try:
        if browser_instance is not None:
            browser_instance.maximize_window()
        else:
            self.current_browser.maximize_window()
    except Exception as exception:
        print_exception(exception)
        status = False
    return status

def go_to(self, url, browser_instance=None):
    """Navigates the active browser instance to the provided URL."""
    status = True
    try:
        print_info("Opening url '%s'" % url)
        if browser_instance is not None:
            browser_instance.get(url)
        else:
            self.current_browser.get(url)
    except Exception as exception:
        print_exception(exception)
        status = False
    return status

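# The browser helpers above map directly onto Selenium WebDriver calls. A
# minimal standalone sketch (assumes selenium and a matching ChromeDriver are
# available; the URL is just an example):
from selenium import webdriver

driver = webdriver.Chrome()
driver.maximize_window()
driver.get("https://example.com")
# ... interact with the page ...
driver.quit()   # quit() ends the whole session; close() would close only the current window
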
def cs_add_users_to_reservation(self, system_name, reservation_name,
                                list_of_usernames):
    """Add one or more permitted users to the specified reservation.

    :Datafile usage: NA

    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. list_of_usernames(list) = list of usernames to permit access
           to the reservation. For example, to allow many users to access
           the reservation: list_of_usernames = ['user1', 'user2', 'userx']

    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Add one or more permitted users to the reservation"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote("cs_add_users_to_reservation, cs obj-{}".format(cloud_shell), "info")
    usernames = list_of_usernames[0:]
    testcase_Utils.pNote("cs_add_users_to_reservation, res_name-{}, "
                         "users-{}".format(reservation_name, usernames))
    status = False
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    try:
        xml_resp = cloud_shell.AddPermittedUsersToReservation(cs_res_id, usernames)
        if xml_resp is not None:
            testcase_Utils.pNote("\n\n *** Cloudshell Add users \"{}\" to"
                                 " reservation successful".format(usernames), "info")
            status = True
        else:
            testcase_Utils.pNote("\n\n *** Cloudshell Add users \"{}\" to"
                                 " reservation failed".format(usernames), "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status

def cs_remove_route_from_reservation(self, system_name, reservation_name,
                                     first_endpoint, second_endpoint,
                                     mapping_type):
    """Disconnects two endpoints and removes the mapped route between them.

    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. first_endpoint(str) = The first endpoint of the two end points
        4. second_endpoint(str) = The second endpoint of the two end points
        5. mapping_type(string) = Specify bi-directional or uni-directional
           as the mapping type

    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Remove Route From Reservation in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote("cs_remove_route_from_reservation, cs obj-{}".format(cloud_shell), "info")
    status = False
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    list_of_endpoints = [first_endpoint, second_endpoint]
    try:
        xml_resp = cloud_shell.RemoveRoutesFromReservation(
            cs_res_id, list_of_endpoints, mapping_type)
        if xml_resp is not None:
            testcase_Utils.pNote("\n\n *** Cloudshell Remove Route From"
                                 " Reservation successful for \"{}\"\n".format(reservation_name),
                                 "info")
            status = True
        else:
            testcase_Utils.pNote("\n\n *** Cloudshell Remove Route From"
                                 " Reservation failed for \"{}\"".format(reservation_name),
                                 "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status

def connect_target_via_host(self, target, user, auth, invoke_shell=False, log=None):
    """Forward the SSH connection to another target client.

    :Arguments:
        1. target(string) - Name/IP of the target machine to be connected
        2. user - username to connect with
        3. auth - password for the user
        4. invoke_shell - open a shell for passing commands

    :Returns:
        1. target_session - Session object for the target connection
        2. status(bool) = True/False
    """
    target_session = SSHComm(target, uid=user, pid=auth, logfile=log)
    status = False
    try:
        ssh_transport = self.sshobj.get_transport()
        channel = ssh_transport.open_channel("direct-tcpip", (target, 22),
                                             ('localhost', 0))
        print_info("Connecting to target: {}".format(target))
        target_session.sshobj.set_missing_host_key_policy(self.param.AutoAddPolicy())
        target_session.sshobj.connect(target, port=22, username=user,
                                      password=auth, sock=channel)
        if target_session.logfile:
            target_session.log = open(self.logfile, 'w')
        print_info("Connection to target: {} successful".format(target))
        if invoke_shell:
            print_info("Opening shell for {}".format(target))
            target_session.invoke_shell()
        status = True
    except self.param.SSHException as exception:
        print_exception(exception)
    except Exception as exception:
        print_exception(exception)
    return target_session, status

def compare_json_files(self, json_file1, json_file2):
    """Compares two json files and returns true or false.
    This method recursively sorts all the lists and dictionaries in the
    json files into sorted json objects and then performs the comparison."""
    print_info("compare two json files")
    result = False
    try:
        json_object1 = json.load(open(json_file1, 'r'))
        json_object2 = json.load(open(json_file2, 'r'))
        result = self.sort_json_object(json_object1) == self.sort_json_object(json_object2)
        if not result:
            self.diff_json_objects(json_object1, json_object2)
    except Exception as exception:
        print_exception(exception)
    return result

def execute_method_for_keyword(self):
    """Executes the method corresponding to the keyword."""
    kwargs, kw_status = self.get_argument_as_keywords()
    print_info("The arguments passed for the current step are: '{0}'".format(kwargs))
    if kw_status:
        # Execute the corresponding method
        try:
            # print(dir(self.exec_obj))
            # print(self.exec_obj.__class__)
            # print(self.exec_obj.__module__)
            keyword_result = self.exec_obj(**kwargs)
        except Exception as exception:
            trcback = print_exception(exception)
            keyword_result = ("EXCEPTION", trcback)
    self.data_repository = self.update_data_repository(self.keyword,
                                                       keyword_result,
                                                       self.data_repository)
    return self.data_repository

def execute_method_for_keyword(self):
    """Executes the method corresponding to the keyword."""
    Utils.config_Utils.set_datarepository(self.data_repository)
    kwargs, kw_status = self.get_argument_as_keywords()
    if kw_status:
        # Execute the corresponding method
        method_loader = self.exec_obj.im_class()
        try:
            if WarriorCliClass.cmdprint:
                sessid = kwargs['system_name']
                if 'session_name' in kwargs:
                    sessid += kwargs['session_name']
                print_info("{:*^80}".format(' System: ' + sessid + ' '))
                self.data_repository.update({sessid: sessid,
                                             sessid + '_td_response': {}})
            keyword_result = self.exec_obj(method_loader, **kwargs)
        except Exception as exception:
            trcback = print_exception(exception)
            keyword_result = ("EXCEPTION", trcback)
    self.data_repository = self.update_data_repository(self.keyword,
                                                       keyword_result,
                                                       self.data_repository)
    return self.data_repository

def cs_add_topology_to_reservation(self, system_name, reservation_name,
                                   topology_full_path):
    """Add Topology to reservation in Cloudshell.

    :Datafile usage: NA

    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. topology_full_path(string) = Specify the full topology name.
           Include the full path from the root to the topology, separated
           by slashes. For example: FolderName/Topologies/TopologyName

    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Add Topology to Reservation in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote("cs_add_topology_to_reservation, cs obj-{}".format(cloud_shell), "info")
    status = False
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    try:
        xml_resp = cloud_shell.AddTopologyToReservation(cs_res_id, topology_full_path)
        if xml_resp is not None:
            testcase_Utils.pNote("\n\n *** Cloudshell Add Topology \"{}\""
                                 " successful for \"{}\"\n".format(topology_full_path,
                                                                   reservation_name), "info")
            status = True
        else:
            testcase_Utils.pNote("\n\n *** Cloudshell Add Topology \"{}\""
                                 " failed for \"{}\"".format(topology_full_path,
                                                             reservation_name), "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status

def maximize_browser_window(self, browser_instance=None, headless_mode=False):
    """Maximizes the current browser window."""
    status = True
    try:
        if browser_instance is None:
            browser_instance = self.current_browser
        # Need to distinguish whether the browser is in headless mode or not,
        # as maximize_window doesn't work in headless mode
        if headless_mode:
            browser_instance.set_window_size(1920, 1080)
        else:
            browser_instance.maximize_window()
    except Exception as exception:
        print_exception(exception)
        status = False
    return status

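# Headless sketch of the same work-around (assumes selenium with Chrome; the
# headless flag and window size are typical values, not framework settings):
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

options = Options()
options.add_argument("--headless")         # no visible window, so maximize_window() is a no-op
driver = webdriver.Chrome(options=options)
driver.set_window_size(1920, 1080)         # emulate a maximized 1080p window instead
driver.quit()
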
def cs_disconnect_routes(self, system_name, reservation_name,
                         first_endpoint, second_endpoint):
    """Disconnects the routes in the cloud shell.

    :Arguments:
        1. system_name(string) = Name of the UAP system from the datafile
        2. reservation_name(string) = Specify the name of the reservation.
        3. first_endpoint(str) = The first endpoint of the two end points
        4. second_endpoint(str) = The second endpoint of the two end points

    :Returns:
        1. status(bool) = True/False
    """
    wdesc = "Disconnect Routes in CloudShell API Host"
    testcase_Utils.pSubStep(wdesc)
    testcase_Utils.pNote(file_Utils.getDateTime())
    testcase_Utils.pNote("cs_disconnect_routes, cs obj-{}".format(cloud_shell), "info")
    endpoints = [first_endpoint, second_endpoint]
    status = False
    cs_res_id = data_Utils.get_object_from_datarepository(
        system_name + "_" + reservation_name + "_reservationId")
    try:
        xml_resp = cloud_shell.DisconnectRoutesInReservation(cs_res_id, endpoints)
        if xml_resp is not None:
            testcase_Utils.pNote("\n\n *** Cloudshell Disconnect Routes"
                                 " successful for \"{}\"\n".format(reservation_name), "info")
            status = True
        else:
            testcase_Utils.pNote("\n\n *** Cloudshell Disconnect Routes"
                                 " failed for \"{}\"".format(reservation_name), "warning")
    except Exception as exception:
        print_exception(exception)
    testcase_Utils.report_substep_status(status)
    return status

def diff_json_objects(self, json_object1, json_object2, case_conversion=False):
    """Takes two json objects as inputs and calculates the difference between them.

    :Returns: Returns a status and a comparison result (tuple or None)
        1. No difference between the two json objects:
           - status = True
           - comparison result = None
        2. Difference found between the two json objects:
           - status = False
           - comparison result = a tuple of two lists
             list1 = items in json1 but not in json2
             list2 = items in json2 but not in json1
        3. If any exception is encountered during comparison:
           - status = False
           - comparison result = None
    """
    result = False
    try:
        if case_conversion:
            json_object1 = self.case_conversion_json(json_object1)
            json_object2 = self.case_conversion_json(json_object2)
        json_object1 = self.nested_json_object(json_object1)
        json_object2 = self.nested_json_object(json_object2)
        list1 = list(json_object1.difference(json_object2))
        list2 = list(json_object2.difference(json_object1))
        if list1 or list2:
            print_info("Items in json 1 but not json 2: {0}".format(str(list1)))
            print_info("\nItems in json 2 but not json 1: {0} ".format(str(list2)))
            result_list = (list1, list2)
        else:
            result, result_list = (True, None)
    except Exception as exception:
        print_exception(exception)
        result_list = None
    return result, result_list

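# Illustrative stand-in for nested_json_object (the real helper lives elsewhere
# in the framework): flattening each object to a set of (path, value) pairs is
# one way the two set differences above can be computed. Made-up data.
import json

def _flatten(obj, path=""):
    """Flatten nested dicts/lists into a set of (path, value) leaf pairs."""
    if isinstance(obj, dict):
        items = obj.items()
    elif isinstance(obj, list):
        items = enumerate(obj)
    else:
        return {(path, obj)}
    leaves = set()
    for key, value in items:
        leaves |= _flatten(value, "{}/{}".format(path, key))
    return leaves

set1 = _flatten(json.loads('{"name": "sw1", "vlan": 10}'))
set2 = _flatten(json.loads('{"name": "sw1", "vlan": 20}'))
only_in_1 = list(set1.difference(set2))   # [('/vlan', 10)]
only_in_2 = list(set2.difference(set1))   # [('/vlan', 20)]
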
def start_thread(self, session):
    """Starts a log-collection thread (self.collect_log) for the given session.
    If no valid session is provided, an error is printed instead.
    """
    value = False
    try:
        if session:
            self.current_thread = threading.Thread(target=self.collect_log,
                                                   args=(session,))
            self.current_thread.start()
        else:
            print_error("Need a valid session to start collecting logs")
    except Exception as exception:
        print_exception(exception)
    else:
        value = True
    return value

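# The start_thread/collect_log pair above implements a simple stop-flag thread
# pattern. A stdlib-only sketch of the same idea with a fake data source
# standing in for session.read_nonblocking (everything here is illustrative,
# not framework code):
import threading
import time

class LogCollector(object):
    def __init__(self):
        self.stop_thread_flag = False
        self.data = ""
        self.current_thread = None

    def collect(self, read_chunk):
        # read_chunk() stands in for session.read_nonblocking(...)
        while not self.stop_thread_flag:
            self.data += read_chunk()
            time.sleep(0.5)

    def start(self, read_chunk):
        self.current_thread = threading.Thread(target=self.collect, args=(read_chunk,))
        self.current_thread.start()

    def stop(self):
        self.stop_thread_flag = True
        self.current_thread.join()

collector = LogCollector()
collector.start(lambda: "tick ")   # fake source returning a fixed chunk
time.sleep(1.5)
collector.stop()
print(collector.data)              # e.g. "tick tick tick "
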