def db_session():
    """Build a SQLAlchemy session from the YAML configuration files.

    Reads ``global_conf.yaml`` (database server settings) and
    ``user_info.yaml`` (credentials) from the user's home directory.

    :returns: a SQLAlchemy session bound to the configured PostgreSQL
        database, or ``None`` when the database is disabled or not fully
        configured.
    """
    filepath = os.path.join(expanduser('~'), "user_info.yaml")
    globalfilepath = os.path.join(expanduser('~'), "global_conf.yaml")
    glob_data = get_data_from_yaml(globalfilepath)
    user_data = get_data_from_yaml(filepath)
    # Default to "no database" so DATABASE_URL is always bound.  The
    # original code left it undefined (NameError) when EnableDatabase was
    # set but the 'Database' section was missing from either YAML file.
    DATABASE_URL = None
    if glob_data and glob_data.get('EnableDatabase'):
        if glob_data.get('Database'):
            db_name = glob_data['Database'].get('database_name')
            serverip = glob_data['Database'].get('serverip')
            if user_data and user_data.get('Database'):
                user_name = user_data['Database'].get('username')
                pwd = user_data['Database'].get('pwd')
                # decrypting the password
                db_pwd = aes().decrypt(pwd)
                DATABASE_URL = ('postgresql+psycopg2://' + user_name +
                                ':%s' % (db_pwd) + '@' + serverip + '/' +
                                db_name)
    session = None
    if DATABASE_URL:
        # creating engine
        engine = create_engine(DATABASE_URL)
        session = sessionmaker(expire_on_commit=False)
        session.configure(bind=engine)
        # creates tables
        Base.metadata.create_all(engine)
        # session object
        session = session()
    return session
def test_chkstats(jconn):
    """Smoke-test ParseAndRaiseBug.chk_stats with the local config files."""
    cwd = os.getcwd()
    glob_data = get_data_from_yaml(os.path.join(cwd, 'global_conf.yaml'))
    user_data = get_data_from_yaml(os.path.join(cwd, 'user_info.yaml'))
    jira_glob = glob_data.get("Jira")
    jira_user = user_data.get("Jira")
    url = jira_glob.get("url")
    username = jira_user.get("username")
    password = jira_user.get("password")
    project = jira_glob.get("project")
    affects_version = jira_glob.get("affects_version")
    env = user_data.get("env")
    watcher = jira_user.get("watcher")
    pid = ''
    outputfile = cwd
    # load the shared unit-test fixture data
    with open('unit_tests_data.yaml', 'r') as f:
        data = yaml.load(f, Loader=yaml.FullLoader)
    pobj = ParseAndRaiseBug(url, username, password, project,
                            affects_version, env, watcher, pid,
                            outputfile, robo_file=None, tc_management=None)
    pobj.chk_stats(outputfile)
def __init__(self):
    """Load database-server connection details from the YAML config files."""
    self.db = db()
    home = os.path.expanduser('~')
    userdata = get_data_from_yaml(os.path.join(home, "user_info.yaml"))
    glob_data = get_data_from_yaml(os.path.join(home, "global_conf.yaml"))
    db_glob = glob_data['Database']
    db_user = userdata['Database']
    self.dbserver = db_glob['serverip']
    self.username = db_user['serverusername']
    # the stored password is encrypted; decrypt it before use
    self.password = AESCipher().decrypt(db_user['serverpassword'])
    self.servertype = db_glob['serverOStype']
def __init__(self, url, username, password, project, affects_version,
             env, watcher, pid, outputfile, robo_file=None,
             tc_management=None):
    """
    - **parameters**, **types**, **return** and **return types**::

        param url: url to connect jira and raise or update bug.
        param username: username to login to jira
        param password: password to login into jira
        param project: project name to raise bug in that project.
    """
    # Jira connection credentials
    self.url = url
    self.username = username
    self.password = password
    self.project = project
    # bug metadata used when raising/updating issues
    self.affects_version = affects_version
    self.env = env
    self.watcher = watcher
    self.tsid = pid
    self.robo_file = robo_file
    self.tc_management = tc_management
    self.outputfile = outputfile
    # shared framework configuration, loaded from the user's home dir
    home = os.path.expanduser('~')
    self.globfilepath = os.path.join(home, "global_conf.yaml")
    self.globdata = get_data_from_yaml(self.globfilepath)
def __init__(self, recipients):
    """
    Creates connection to smtp mail server

    - **parameters**, **types**, **return** and **return types**::

        param recipients: List of recipients
        param type: list
    """
    self.recipients = recipients
    self.sender = '*****@*****.**'
    globalfilepath = os.path.join(expanduser('~'), "global_conf.yaml")
    self.glob_data = get_data_from_yaml(globalfilepath)
    if not self.recipients:
        raise MailRecipientsException
    try:
        smtpip = self.glob_data["Email"]["smtpip"]
        smtpport = self.glob_data["Email"]["smtpport"]
        self.mailconn = smtplib.SMTP(smtpip, int(smtpport))
    except Exception as e:
        # map well-known winsock errno values to framework exceptions;
        # anything else is re-raised untouched
        errno = getattr(e, 'errno', None)
        if errno == 10060:
            raise MailConnectionException(
                'Mail connection is not established check smtp ip or port'
            )
        if errno == 10013:
            raise MailSocketAccessDeniedException('An attempt was made'
                                                  'to access a socket '
                                                  'in a way forbidden '
                                                  'by its access '
                                                  'permissions')
        raise
def test_validate_duplicate_issues(jconn):
    """Exercise validate_duplicate_issue against a mocked Jira connection."""
    def get_cwd(*args, **kwargs):
        return os.getcwd()

    # redirect expanduser so config files are read from the working dir
    monkeypatch = MonkeyPatch()
    monkeypatch.setattr(os.path, "expanduser", get_cwd, raising=True)
    cwd = os.getcwd()
    glob_data = get_data_from_yaml(os.path.join(cwd, 'global_conf.yaml'))
    user_data = get_data_from_yaml(os.path.join(cwd, 'user_info.yaml'))
    jira_glob = glob_data.get("Jira")
    jira_user = user_data.get("Jira")
    pid = ''
    outputfile = ''
    with open('unit_tests_data.yaml', 'r') as f:
        data = yaml.load(f, Loader=yaml.FullLoader)
    pobj = ParseAndRaiseBug(jira_glob.get("url"),
                            jira_user.get("username"),
                            jira_user.get("password"),
                            jira_glob.get("project"),
                            jira_glob.get("affects_version"),
                            user_data.get("env"),
                            jira_user.get("watcher"),
                            pid, outputfile,
                            robo_file=None, tc_management=None)
    tc_name = "Test1"
    msg = "SUTAS issue"
    att = outputfile
    summaries = ["SUTAS issue", "test2 failed"]
    issue_keys = ["SUTAS-1", "SUTAS-2"]
    issues = [mock.MagicMock(), mock.MagicMock()]
    for issue in issues:
        issue.fields.status.name.lower.return_value = 'done'
    # stub the Jira field list and the existing-issue lookup
    jconn.conn.fields.return_value = data['project_fields']
    jconn.get_summary.return_value = summaries, issue_keys, issues
    pobj.validate_duplicate_issue(msg, jconn, att, tc_name)
def update_dict(self, filename, new_dict):
    """Merge new_dict into the YAML file at filename (new keys win)."""
    if not os.path.exists(filename):
        yamldict = new_dict
    else:
        # start from the current file contents so unrelated keys survive
        yamldict = get_data_from_yaml(filename).copy()
        yamldict.update(new_dict)
    dump_data_into_yaml(filename, yamldict, mode="w")
def __init__(self, url):
    """
    Initialize the Teamslib class

    - **parameters**, **types**, **return** and **return types**::

        :param url: url of the incoming webhook configured in MS Teams
        :type url: String
    """
    self.url = url
    # global framework configuration lives in the user's home directory
    home = expanduser('~')
    self.globalfilepath = os.path.join(home, "global_conf.yaml")
    self.glob_data = get_data_from_yaml(self.globalfilepath)
def __init__(self, logfilepath):
    """
    Gets username,password and ipaddress of webserver

    - **parameters**, **types**, **return** and **return types**::

        param logfilepath: file path to be pushed to web server
        type logfilepath: String
    """
    # user_info.yaml carries per-user credentials; global_conf.yaml
    # carries shared server settings
    home = os.path.expanduser('~')
    userdata = get_data_from_yaml(os.path.join(home, "user_info.yaml"))
    self.glob_data = get_data_from_yaml(
        os.path.join(home, "global_conf.yaml"))
    self.path = '/var/www/html/sutas-logs/'
    # Eg: logfilepath "C:\Users\admin\Sutas_Logs\test_suite_980_2017Sep18_162033\log.html"
    self.source = logfilepath
    self.serverip = self.glob_data['TestArtifact']['serverip']
    self.username = userdata['TestArtifact']['username']
    # the password is decrypted only when artifact upload is enabled
    if self.glob_data['EnableTestArtifacts'].lower() == "yes":
        self.password = AESCipher().decrypt(
            userdata['TestArtifact']['password'])
def _update_dict(self, filename, new_dict):
    """
    Updates the global conf file.

    - **parameters**, **types**, **return** and **return types**::

        param filename: name of the file
        type filename: String
        param new_dict: dictionary with which file will be updated
        type new_dict: dictionary
    """
    yamldict = new_dict
    if os.path.exists(filename):
        # merge on top of the existing contents instead of replacing them
        yamldict = get_data_from_yaml(filename).copy()
        yamldict.update(new_dict)
    dump_data_into_yaml(filename, yamldict, mode="w")
def __init__(self, username, channelname, slack_token):
    """
    Initialize the Slacklib class

    - **parameters**, **types**, **return** and **return types**::

        param username : username of the slack member who runs the test suite
        param channelname : Channel to which notifications will be sent
        param slack_token : Slack_token of user
        type username: String
        type channelname: String
        type slack_token: String
    """
    self.channelname = channelname
    self.username = username
    # the token is stored encrypted; fall back to the raw value if
    # decryption fails (best-effort, warning only)
    try:
        slack_token = aes().decrypt(slack_token)
    except Exception:
        logger.warn("Invalid slack token")
    self.slk = WebClient(token=slack_token)
    self.channelid = self.get_channelid()
    self.message = ""
    self.globalfilepath = os.path.join(expanduser('~'), "global_conf.yaml")
    self.glob_data = get_data_from_yaml(self.globalfilepath)
def __init__(self):
    """
    Gets symmetric_key from global_conf.yaml file and encodes it with
    hashlib.sha256 module.

    :raises IOError: when global_conf.yaml cannot be read.
    """
    self.blocksize = 128
    # read the values from global_conf.yaml and get the symmetric key.
    try:
        globfile = os.path.join(expanduser('~'), "global_conf.yaml")
        glob_data = get_data_from_yaml(globfile)
        symmetric_key = glob_data['symmetric_key']
    # if the file is not available or do not have permissions to read
    # or any problem in reading the file then raises Exception.
    except IOError as e:
        if e.errno == 13:
            logger.error("user do not have file permissions to "
                         "read the file", e.filename)
        elif e.errno == 2:
            logger.error("no such file or directory", e.filename)
        else:
            logger.error("some problem in opening file."
                         "please check it", e.filename)
        # Re-raise after logging: the original code fell through and then
        # crashed with a confusing UnboundLocalError on symmetric_key.
        raise
    # Encodes the symmetric key with sha256.
    self.key = hashlib.sha256(symmetric_key.encode()).digest()
def __init__(self):
    """Cache paths and parsed contents of the user and global config files."""
    home = expanduser('~')
    self.filepath = os.path.join(home, "user_info.yaml")
    self.globalfilepath = os.path.join(home, "global_conf.yaml")
    self.glob_data = get_data_from_yaml(self.globalfilepath)
    self.data = get_data_from_yaml(self.filepath)
def send_message():
    """
    Creates slackobj and sends consolidated slack message
    """
    global count
    sutasfolder = os.path.join(os.path.expanduser('~'), "Sutas_Logs")
    # send through both channels; each returns "success" or an error value
    slckobj = SlackNotification().slackobj()
    slc = slckobj.send_message()
    tmsobj = TeamsNotification().teamsobj()
    tms = tmsobj.send_message()
    globfilepath = os.path.join(expanduser('~'), "global_conf.yaml")
    globdata = get_data_from_yaml(globfilepath)
    if "logpath" in os.environ:
        # getting the testsuite name from logpath
        mailfile = os.path.basename(os.environ['logpath']).split(
            str(datetime.datetime.now().year))[0]
        # Inside testsuite folder in sutaslogs we are creating a file with
        # testsuite name. This file will be used to store the notification
        # messages
        mailfile = os.path.join(os.path.dirname(os.environ['logpath']),
                                mailfile)
        if os.path.isfile(mailfile):
            # NOTE(review): count appears to be a module-level flag so the
            # stale file is removed only once per process — confirm.
            if count == 0:
                os.remove(mailfile)
                count = 1
        # suitelogpaths file is created in sutaslog folder which is
        # in user's home directory.
        suitelogpaths = os.path.join(sutasfolder, "suitelogpaths")
        flag = False
        # consolidated mail appends to the path list; otherwise overwrite
        if globdata.get('Consolidatedmail', 'no').lower() == 'yes':
            mode = "a"
        else:
            mode = "w"
        # Checks if suitelogpath file already exists.
        if os.path.isfile(suitelogpaths):
            # checking if the logpath is already in the suitelogpaths file.
            # if path exists then continue else writes the path in to file.
            with open(suitelogpaths, 'r') as suite:
                for line in suite.read().strip().splitlines():
                    if os.environ['logpath'] in line:
                        flag = True
            if not flag:
                with open(suitelogpaths, mode) as suite:
                    suite.write(os.environ['logpath'])
                    suite.write('\n')
        else:
            # creates suitelogpaths file if doesn't exist and writes
            # log path in to it.
            with open(suitelogpaths, mode) as suite:
                suite.write(os.environ['logpath'])
                suite.write('\n')
        # writing notification messages in to a testsuite file which is
        # created in testsuite folder.
        with open(mailfile, 'a') as agg:
            agg.write(os.environ["sutasmessages"])
        # hand the accumulated messages over to the mail stage and reset
        os.environ["sutasmail"] = os.environ["sutasmessages"]
        os.environ["sutasmessages"] = ""
    msgs = {"slack": slc, "teams": tms}
    if slc != "success" or tms != "success":
        return msgs
    else:
        return "success"
def clone_test_suite(j_conn, master_ts_obj, a_test_cases, testtype):
    """
    Responsible for clone the test from master test suite

    :param j_conn: JIRA connection object.
    :param master_ts_obj: test suite jira object
    :param a_test_cases: list of test cases to be cloned.

    returns cloned test suite jira object
    """
    # map Jira field display names -> field ids (needed for custom fields)
    fieldmap = {field['name']: field['id'] for field in j_conn.fields()}
    master_suite_ids = os.environ.get('multiplesuiteids', None)
    if master_suite_ids:
        master_suite_ids = master_suite_ids.split(',')
    if a_test_cases:
        ts_f = master_ts_obj.fields()
        globfilepath = os.path.join(os.path.expanduser('~'),
                                    "global_conf.yaml")
        globdata = get_data_from_yaml(globfilepath)
        project = globdata['TM_Project']
        fixversion = globdata['fix_version']
        sprint_name = globdata['sprint_number']
        sprint_id = TestManagement.get_sprint_id(j_conn, sprint_name)
        sprint_field = fieldmap.get('Sprint')
        # snapshot all raw fields of the master suite
        data = {}
        for field_name in master_ts_obj.raw['fields']:
            data[field_name] = master_ts_obj.raw['fields'][field_name]
        final_fix = []
        if data['fixVersions']:
            fix_v_list = [f['name'] for f in data['fixVersions']]
            jra = j_conn.project(project)
            versions = j_conn.project_versions(jra)
            versions = [v.name for v in reversed(versions)]
            # create the configured fix version in Jira if it is missing;
            # on failure fall back to no fix version (warning only)
            if fixversion not in versions:
                try:
                    fix = j_conn.create_version(name=fixversion,
                                                project=project)
                    fixversion = fix.name
                except:
                    msg = 'Unable to create version {}'.format(fixversion)
                    logger.warn(msg)
                    fixversion = None
                fix_v_list.pop()
                fix_v_list.append(fixversion)
            for i in fix_v_list:
                if i == fixversion:
                    final_fix.append({'name': i})
        ts_fields = {
            'project': {
                'key': project
            },
            'summary': 'CLONE - ' + ts_f.summary,
            'description': ts_f.description,
            'issuetype': {
                'name': ts_f.issuetype.name
            },
            'labels': data['labels'],
            'fixVersions': final_fix
        }
        if master_suite_ids:
            ts_fields['summary'] = 'CLONE - ' + '_'.join(master_suite_ids)
        cloned_ts_obj = j_conn.create_issue(fields=ts_fields)
        if sprint_id:
            cloned_ts_obj.update(fields={sprint_field: sprint_id})
        else:
            logger.warn(
                "Sprint {} not fount in jira project {} or not associated to any board"
                .format(sprint_name, project))
        # link clone <-> master(s) in both directions
        if master_suite_ids:
            for master_suite_id in master_suite_ids:
                master_ts_obj = j_conn.issue(master_suite_id)
                j_conn.create_issue_link('clones', cloned_ts_obj,
                                         master_ts_obj)
                j_conn.create_issue_link('is cloned by', master_ts_obj,
                                         cloned_ts_obj)
        else:
            j_conn.create_issue_link('clones', cloned_ts_obj, master_ts_obj)
            j_conn.create_issue_link('is cloned by', master_ts_obj,
                                     cloned_ts_obj)
        TestManagement._clone_test_case(j_conn, cloned_ts_obj, \
            a_test_cases, testtype)
        return cloned_ts_obj
    else:
        raise Exception("No test cases found in master suite with \
provided configuration in global conf file")
def get_test_case_tags(conn, ts_id, testtype, rerun=None, type='failed'):
    """
    Collect the Robot tags for the runnable test cases of a Jira suite.

    param: conn: jira connection object
    param: ts_id: test suite id (may be a comma-separated list of ids)
    param: testtype: test type used to filter clonable test cases
    param: rerun: when truthy, rerun the existing suite instead of cloning
    param: type: on rerun, which status to pick ('failed' or 'skipped')

    Returns a tuple (tags, issue_obj): 'OR'-joined sutas_id tag string and
    the (cloned or reused) suite issue object.
    """
    globfilepath = os.path.join(os.path.expanduser('~'), "global_conf.yaml")
    globdata = get_data_from_yaml(globfilepath)
    enable_test_mgt = True
    if 'EnableTestManagement' not in globdata or globdata.get(
            'EnableTestManagement', 'no').lower() == "no":
        enable_test_mgt = False
    # fetching test cases from given master test suite.
    # search_issues pages at 100 results, so two pages are fetched.
    if ',' in ts_id:
        test_cases = []
        ts_id = ts_id.split(',')
        for parent in ts_id:
            cases = conn.search_issues('parent=' + parent,
                                       startAt=0, maxResults=100)
            cases_b = conn.search_issues('parent=' + parent,
                                         startAt=100, maxResults=100)
            cases.extend(cases_b)
            test_cases.extend(cases)
    else:
        test_cases = []
        test_cases = conn.search_issues('parent=' + ts_id,
                                        startAt=0, maxResults=100)
        test_cases_b = conn.search_issues('parent=' + ts_id,
                                          startAt=100, maxResults=100)
        test_cases.extend(test_cases_b)
    if not test_cases:
        raise Exception(
            "No Testcases found to clone for the given Suite {}".format(
                ts_id))
    fieldmap = {field['name']: field['id'] for field in conn.fields()}
    # stash the field map on os.environ.__dict__ for later lookups
    os.environ.__dict__['fieldmap'] = {}
    os.environ.__dict__['fieldmap'].update(fieldmap)
    # sutas_id and Automated are customfields added fro sutas framework,
    # so we don't get these fields dirctly with issue object.
    sutas_id_field = fieldmap['sutas_id']
    try:
        auto_field = fieldmap['Automated?']
    except KeyError:
        auto_field = fieldmap['Automated']
    not_planned = '\n Below mentioned test cases are in "not planned"' \
        'state hence not running them.\n'
    not_automated = "\n Below mentioned test cases has Automated field" \
        "as 'No' hence not running them.\n"
    np_summ = []
    na_summ = []
    np_id = []
    na_id = []
    tc_list = []
    tc_data = {}
    to_be_cloned_test_cases = []
    np_slack_str = ''
    na_slack_str = ''
    # checking wheather automated and sutas_id fields are existed or not
    if auto_field and sutas_id_field:
        # if test case marked as not planed or it is not automatable,
        # those kind of test cases will be ignored
        for test_case in test_cases:
            if test_case.fields().status.name.lower() == 'not planned':
                np_summ.append(test_case.fields().summary.encode('utf-8'))
                np_id.append(test_case.key)
            elif test_case.raw['fields'][auto_field] == None:
                na_summ.append(test_case.fields().summary.encode('utf-8'))
                na_id.append(test_case.key)
            elif test_case.fields().status.name.lower() != 'not planned' and \
                    test_case.raw['fields'][auto_field][0]['value'].lower() == 'yes':
                if test_case.raw['fields'][sutas_id_field]:
                    tc_list.append(test_case.raw['fields'][sutas_id_field])
                    to_be_cloned_test_cases.append(test_case)
        # sending not planned and not aumatable testcase list as
        # notification through slack
        np_slack_str = TestManagement._format_slack_str(
            test_case, np_summ, np_id)
        na_slack_str = TestManagement._format_slack_str(
            test_case, na_summ, na_id)
        if np_slack_str:
            notify.message(not_planned)
            notify.message(np_slack_str)
        if na_slack_str:
            logger.info(not_automated)
            logger.info(na_slack_str)
            #notify.message(not_automated)
            #notify.message(na_slack_str)
        if tc_list:
            logger.info(
                "Testcases which are automated and updated with sutas_id in jira {}"
                .format(tc_list))
        # It will filter test cases based on the conifiguration provided
        # by user in user_configuration file for test_type and sprint_no
        robosuitepath = os.environ['robosuitepath']
        suite = TestData(parent=None, source=robosuitepath)
        robotctagids = []
        robotcnames = []
        # index the Robot suite: test name -> tag list
        for testcase in suite.testcase_table:
            robotcnames.append(testcase.name.lower())
            robotctagids.append(testcase.tags.value)
        robotcdict = dict(list(zip(robotcnames, robotctagids)))
        if '-t' in sys.argv:
            # user selected specific tests on the command line
            to_be_cloned_test_cases = []
            tcnames = os.environ['testnames'].lower().split(',')
            for tc in tcnames:
                if tc in robotcnames:
                    for jira_tc in tc_list:
                        if jira_tc in robotcdict[tc]:
                            tcid = jira_tc
                            to_be_cloned_test_cases.append(
                                conn.issue(tcid))
                else:
                    raise Exception(
                        "Robot file doesn't contain tc {} Mentioned".
                        format(tc))
        else:
            to_be_cloned_test_cases = TestManagement.filter_tcs(
                conn, to_be_cloned_test_cases, testtype)
        if not to_be_cloned_test_cases:
            raise Exception(
                "Not found any testcase to clone for the given suite with given configuration"
            )
        final_tcs = []
        for test_case in to_be_cloned_test_cases:
            if not rerun:
                # keep only jira cases whose sutas_id tag exists in Robot
                for robotctagid in robotctagids:
                    if robotctagid:
                        if test_case.raw['fields'][
                                sutas_id_field] in robotctagid:
                            final_tcs.append(test_case.raw['fields']
                                             [sutas_id_field])
                            continue
            else:
                # rerun: pick only the requested status (failed/skipped)
                if test_case.fields().status.name.lower(
                ) == 'failed' and type == 'failed':
                    final_tcs.append(
                        test_case.raw['fields'][sutas_id_field])
                elif test_case.fields().status.name.lower(
                ) == 'skipped' and type == 'skipped':
                    final_tcs.append(
                        test_case.raw['fields'][sutas_id_field])
        tc_list = list(set(final_tcs))
        tags = 'OR'.join(tc_list)
        logger.info(tags)
    else:
        raise UserException("Make sure fields 'sutas_id' and " \
                            "'Automated' must be there in every test case")
    if not tags:
        raise UserException(
            "No test cases found in provided test suite : ", ts_id)
    issue_obj = None
    if isinstance(ts_id, list):
        os.environ['multiplesuiteids'] = ','.join(ts_id)
        iss_obj = conn.issue(ts_id[-1])
    else:
        iss_obj = conn.issue(ts_id)
    # checking testmanagent enabled or not in configuration files
    if enable_test_mgt:
        if iss_obj:
            if not rerun:
                # clning the test suite and its test cases from Master
                # test suite
                clone_testcases = [conn.issue(tc) for tc in tc_list]
                issue_obj = TestManagement.clone_test_suite(\
                    conn, iss_obj, clone_testcases, testtype)
                logger.warn("cloned test suite id : " + issue_obj.key)
                notify.message("cloned test suite id : " + issue_obj.key)
            else:
                issue_obj = iss_obj
                logger.warn("Rerunning test suite of id : " + issue_obj.key)
                notify.message("Rerunning test suite of id : " +
                               issue_obj.key)
            ts_id = issue_obj.id
            # fetching test cases from cloned test suite.
            test_cases = conn.search_issues('parent=' + ts_id)
            for test_case in test_cases:
                tc_data[test_case.raw['fields'][sutas_id_field]] = \
                    (test_case.key, test_case.fields().summary)
            if issue_obj.fields().status.name.upper() in [
                    'TODO', 'TO DO'
            ] and not rerun:
                for test_case in test_cases:
                    if test_case.raw['fields'][sutas_id_field]:
                        status = test_case.fields().status.name.lower()
                        if status in ['test in progress','running',\
                                'passed', 'failed', 'skipped', 'blocked']:
                            raise Exception(
                                "Test case already executed, Before \
executing a test case make sure\
status in todo or ready to run state.")
                    else:
                        raise Exception(
                            "Make sure 'sutas_id' field updateed \
with test case ID")
                transitions = conn.transitions(ts_id)
                # moving test suite to running state
                for transition in transitions:
                    if transition['name'].lower() in [
                            'run', 'suite in progress'
                    ]:
                        conn.transition_issue(ts_id, str(transition['id']))
            elif issue_obj.fields().status.name.lower() in [
                    'running', 'suite in progress'
            ]:
                if not rerun:
                    raise Exception(
                        "Test suite won't run because it is in Running state,\
If you want to run test suit then clone the master \
suite and provide cloned test suite id")
            elif issue_obj.fields().status.name.lower() in [
                    'done', 'completed'
            ]:
                if not rerun:
                    raise Exception(
                        "Test suite won't run because it is in Completed state, \
If you want to run test suit then clone the master suite and \
provide cloned test suite id")
            # expose sutas_id -> (issue key, summary) map to later stages
            os.environ.__dict__["testids"] = {}
            os.environ.__dict__["testids"].update(tc_data)
        else:
            raise Exception(
                "No test suite found with provided test suite id :", ts_id)
    else:
        issue_obj = iss_obj
    return tags, issue_obj
def setUp(self):
    """Load global_conf.yaml from the working directory before each test."""
    conf = os.path.join(os.getcwd(), "global_conf.yaml")
    self.globdata = get_data_from_yaml(conf)
def get_rqmt(self, kwargs):
    """
    Get the requirements dictionary from each test case

    - **parameters**, **types**, **return** and **return types**::

        :param kwargs: Dictionary with devices as keys and softwares as values
        :type kwargs: Dict
    """
    self.tcname = BuiltIn().get_variable_value("${TEST NAME}")
    msg = ("\n\nValidating Software/Hardware requirements (%s) for "
           "test case %s\n " % (kwargs, self.tcname))
    logger.info(msg)
    notify.message(msg)
    sw_dict = kwargs
    # device connection details come from the testbed config file
    configfile = os.environ['Configfile']
    data = get_data_from_yaml(configfile)
    for device in sw_dict:
        try:
            softwares = sw_dict[device].split(",")
            # non-empty when the device name contains RHEL/Windows/Switch
            check = [
                True for i in
                ["RHEL".lower(), "Windows".lower(), "Switch".lower()]
                if i in device.lower()
            ]
            if check:
                device_ip = data[str(device)]['ip']
                device_user = data[str(device)]['username']
                device_pswd = data[str(device)]['password']
                if device_ip and device_user and device_pswd:
                    # dispatch on the device-name prefix
                    if device.startswith("RHEL"):
                        self.connectLinux(device_ip, device_user,
                                          device_pswd, softwares,
                                          self.tcname)
                    elif device.startswith("Windows"):
                        self.connectWin(device_ip, device_user,
                                        device_pswd, softwares,
                                        self.tcname)
                    elif device.startswith("Switch"):
                        self.connectSwitch(device_ip, device_user,
                                           device_pswd, self.tcname)
            elif device.lower().startswith("hitachi"):
                # storage arrays are validated over their REST interface
                ip = data[device]['rest_ip']
                u_name = data[device]['rest_username']
                pwd = data[device]['rest_password']
                model = data[device]['model']
                serial_number = data[device]['serial_number']
                port = data[device]['rest_port']
                self.validate_hitachi_software(ip, u_name, pwd, model,
                                               serial_number, port,
                                               softwares, self.tcname)
            elif device.lower().startswith("netapp"):
                ip = data[device]['rest_ip']
                u_name = data[device]['rest_username']
                pwd = data[device]['rest_password']
                netapp_ip = data[device]['netapp_ip']
                netapp_username = data[device]['netapp_username']
                netapp_password = data[device]['netapp_password']
                array_name = data[device]['array_name']
                self.validate_netapp_software(ip, u_name, pwd, netapp_ip,
                                              netapp_username,
                                              netapp_password, array_name,
                                              softwares, self.tcname)
        except KeyError as e:
            # missing config entry for the device: report and abort the test
            text = "Device {}, connection info missing in config file" \
                   "so, software cannot be checked for this device." \
                   "Hence skipping the Test case {} ".format(
                       device, self.tcname)
            logger.error(text)
            notify.message(text)
            raise Exception(e)
        except Exception as e:
            logger.error(str(e))
            notify.message(str(e))
            raise Exception(e)
def user_setup(self):
    """
    Gets the user config details

    Interactive wizard: prompts (with previous values as defaults) for
    Jira, bug-management, notification, database and artifact settings,
    persists them to user_info.yaml / global_conf.yaml, then validates
    each enabled integration and prints a summary table.
    """
    # previous values serve as prompt defaults
    username = self.prevdata('Jira.username', self.userprevdata)
    password = self.prevdata('Jira.password', self.userprevdata)
    jira_url = self.prevdata('Jira.url', self.globalprevdata)
    jira_proj = self.prevdata('Jira.project', self.globalprevdata)
    raise_bugs = self.prevdata('Raise_Bugs', self.globalprevdata)
    jirabugpriority = self.prevdata('Jira.bugpriority',
                                    self.globalprevdata)
    jirabugseverity = self.prevdata('Jira.bugseverity',
                                    self.globalprevdata)
    jira_affect_version = self.prevdata('Jira.affects_version',
                                        self.globalprevdata)
    environment = self.prevdata('environment', self.globalprevdata)
    jira_watcher = self.prevdata('Jira.watcher', self.globalprevdata)
    tmproject = self.prevdata('TM_Project', self.globalprevdata)
    sprintnumber = self.prevdata('sprint_number', self.globalprevdata)
    fixversion = self.prevdata('fix_version', self.globalprevdata)
    log_level = self.prevdata('LogLevel', self.globalprevdata)
    symmetric_key = "Enter a key to encrypt/decrypt passwords [" +\
        self.prevdata('symmetric_key', self.globalprevdata) + "]: "
    symmetric_key = self.get_input(symmetric_key)
    environment = "Enter the Execution Environment [" + environment + "]: "
    environment = self.get_input(environment)
    tmproject = "Enter Jira Test Management Project Key [" + \
        tmproject + "]: "
    tmproject = self.get_input(tmproject)
    sprintnumber = "Enter Current Sprint Name [" + \
        sprintnumber + "]: "
    sprintnumber = self.get_input(sprintnumber)
    fixversion = "Enter Jira testcase/testsuite fix version [" + \
        fixversion + "]: "
    fixversion = self.get_input(fixversion)
    glob_details = {
        "symmetric_key": symmetric_key,
        'environment': environment,
        'sprint_number': sprintnumber,
        'fix_version': fixversion,
        'TM_Project': tmproject
    }
    self.update_dict(self.globalfilepath, glob_details)
    enabletestmanagement = "Do you want to enable test management as jira [" +\
        self.prevdata('EnableTestManagement', self.globalprevdata) + "]:(Yes/No)"
    enabletestmanagement = self.get_input(enabletestmanagement,
                                          choices=["yes", "no"])
    if enabletestmanagement.lower() == 'yes':
        username = "******" + username + "]:"
        username = self.get_input(username)
        password = self.get_password("enter Jira APIKEY:", password)
        jira_url = "Enter Jira URL [" + jira_url + "]: "
        jira_url = self.get_input(jira_url)
    raise_bugs = "Do you want to enable Bug Management [" +\
        self.prevdata('Raise_Bugs', self.globalprevdata) + "]:(Yes/No)"
    raise_bugs = self.get_input(raise_bugs, choices=["yes", "no"])
    if raise_bugs.lower() == 'yes':
        jira_proj = "Enter Jira Bug Management Project [" + jira_proj + "]: "
        jira_proj = self.get_input(jira_proj)
        jirabugpriority = "Enter Jira bug priority(1:Critcical,2:High,3:Medium,4:Low,5:Trivial) [" + \
            jirabugpriority + "]: "
        jirabugpriority = self.get_input(
            jirabugpriority, choices=['1', '2', '3', '4', '5'])
        jirabugseverity = "Enter Jira bug severity(1:Critcical,2:Major,3:Moderate,4:Low,5:Trivial) [" + \
            jirabugseverity + "]: "
        jirabugseverity = self.get_input(
            jirabugseverity, choices=['1', '2', '3', '4', '5'])
        jira_affect_version = "Enter the Affects Version [" \
            + jira_affect_version + "]: "
        jira_affect_version = self.get_input(jira_affect_version)
        logger.warn("By default project lead and test user "
                    "will be added as watcher")
        if jira_watcher:
            value = 'yes'
        else:
            value = 'no'
        add_watcher = "Do you want to add more watcher:(Yes/No)[" + \
            value + "]: "
        add_watcher = self.get_input(add_watcher)
        if add_watcher.lower() == 'yes':
            jira_watcher = "Enter user name to add as a watcher [" +\
                jira_watcher + "]: "
            jira_watcher = self.get_input(jira_watcher)
        else:
            jira_watcher = ''
    # credentials go to the user file; everything else to the global file
    glob_dict = {
        'EnableTestManagement': enabletestmanagement,
        'Jira': {
            'url': jira_url,
            'project': jira_proj
        }
    }
    jira_dict = {'Jira': {'username': username, 'password': password}}
    glob_dict.update({'Raise_Bugs': raise_bugs})
    raise_bugs_dict = {
        'bugseverity': jirabugseverity,
        'bugpriority': jirabugpriority,
        'affects_version': jira_affect_version,
        'watcher': jira_watcher,
    }
    glob_dict['Jira'].update(raise_bugs_dict)
    self.update_dict(self.filepath, jira_dict)
    self.update_dict(self.globalfilepath, glob_dict)
    # each add_* helper returns (user-file dict, global-file dict) or a
    # single global-file dict
    db_dict = self.add_dbconf()
    slack_dict = self.add_slackconf()
    teams_dict = self.add_teamsconf()
    email_dict = self.add_mailconf()
    artifact_dict = self.add_artifactserverdetails()
    self.update_dict(self.filepath, db_dict[0])
    self.update_dict(self.globalfilepath, db_dict[1])
    self.update_dict(self.filepath, slack_dict[0])
    self.update_dict(self.globalfilepath, slack_dict[1])
    self.update_dict(self.globalfilepath, email_dict)
    self.update_dict(self.globalfilepath, teams_dict)
    self.update_dict(self.filepath, artifact_dict[0])
    self.update_dict(self.globalfilepath, artifact_dict[1])
    log_level = "\nEnter the log level (info or debug or warn) [" +\
        'warn' + "]:"
    log_level = self.get_input(log_level,
                               choices=["info", "debug", "warn", "error"])
    log_dict = {"LogLevel": log_level.lower()}
    self.update_dict(self.globalfilepath, log_dict)
    # validate every enabled integration and collect a summary row each
    val_obj = ValidateUserConfig()
    globdata = get_data_from_yaml(self.globalfilepath)
    jiralist = []
    dblist = []
    cronlist = []
    maillist = []
    comlist = []
    comlist1 = []
    if globdata.get('Raise_Bugs', 'no').lower() == 'yes':
        try:
            val_obj.validate_jira()
            jiralist = ["Jira", "Validated Successfully"]
        except Exception as e:
            notify.message('`\t\t' + str(e).replace("\n", "\n\t\t") + '`')
            jiralist = [
                'Jira', "Validation Failed with below error:\n\t\t\t" +
                str(e).replace("\n", "\n\t\t\t")
            ]
    else:
        jiralist = [
            "Jira", "TestManagement is set to 'no'. Hence not validating"
        ]
    if globdata.get('EnableDatabase', 'no').lower() == 'yes':
        try:
            val_obj.validate_database()
            dblist = ["Database", "Validated Successfully"]
        except Exception as e:
            notify.message('`\t\t' + str(e).replace("\n", "\n\t\t") + '`')
            dblist = [
                "Database", "Validation Failed with below error:\n\t\t\t" +
                str(e).replace("\n", "\n\t\t\t")
            ]
    else:
        dblist = [
            "Database",
            "EnableDatabase is set to 'no'. Hence not validating"
        ]
    if globdata.get('EnableTestArtifacts', 'no').lower() == 'yes':
        try:
            val_obj.ckeck_cronjob_in_appache_server()
            cronlist = ["TestArtifact server", "Validated Successfully"]
        except Exception as e:
            notify.message('`\t\t' + str(e).replace("\n", "\n\t\t") + '`')
            cronlist = [
                "TestArtifact server",
                "Validation Failed with below error:\n\t\t\t" +
                str(e).replace("\n", "\n\t\t\t")
            ]
    else:
        cronlist = [
            "TestArtifact server",
            "EnableTestArtifacts is set to 'no'. Hence not validating"
        ]
    if globdata.get('EmailNotifications', 'no').lower() == 'yes':
        try:
            val_obj.validate_email()
            maillist = ["Email", "Validated Successfully"]
        except Exception as e:
            notify.message('`\t\t' + str(e).replace("\n", "\n\t\t") + '`')
            maillist = [
                "Email", "Validation Failed with below error:\n\t\t\t" +
                str(e).replace("\n", "\n\t\t\t")
            ]
    else:
        maillist = [
            "Email",
            "EmailNotifications is set to 'no'. Hence not validating"
        ]
    msg = ''
    # decide which chat channels need validation
    if globdata.get('SlackNotifications','no').lower() == 'yes' and \
            globdata.get('TeamsNotifications','no').lower() == 'yes':
        msg = "Slack and MS Teams"
    elif globdata.get('SlackNotifications','no').lower() == 'yes' and \
            globdata.get('TeamsNotifications','no').lower() == 'no':
        msg = "Slack"
        comlist1 = [
            "MS Teams",
            "TeamsNotifications is set to no. Hence not validating."
        ]
    elif globdata.get('SlackNotifications','no').lower() == 'no' and \
            globdata.get('TeamsNotifications','no').lower() == 'yes':
        msg = "MS Teams"
        comlist1 = [
            "Slack",
            "SlackNotifications is set to no. Hence not validating."
        ]
    if msg:
        try:
            val_obj.validate_communication_channels()
            comlist = [msg, "Validated Successfully"]
        except Exception as e:
            if "Slack" in msg and "Teams" in msg:
                # NOTE(review): '"Slack" in e' tests membership on the
                # exception object itself; probably intended str(e) —
                # confirm before relying on this branch.
                if "Slack" in e and "Teams" not in e:
                    comlist = [
                        "Slack",
                        "Validation Failed with below error:\n\t\t\t" +
                        str(e).replace("\n", "\n\t\t\t")
                    ]
                    comlist1 = ["MS Teams", "Validated Successfully"]
                elif "Teams" in e and "Slack" not in e:
                    comlist = [
                        "MS Teams",
                        "Validation Failed with below error:\n\t\t\t" +
                        str(e).replace("\n", "\n\t\t\t")
                    ]
                    comlist1 = ["Slack", "Validated Successfully"]
                else:
                    comlist = [
                        msg, "Validation Failed with below error:\n\t\t\t" +
                        str(e).replace("\n", "\n\t\t\t")
                    ]
    else:
        comlist = [
            "Slack",
            "SlackNotifications is set to no. Hence not validating."
        ]
        comlist1 = [
            "MS Teams",
            "TeamsNotifications is set to no. Hence not validating."
        ]
    msg = tabulate([
        jiralist, [], [], dblist, [], [], cronlist, [], [], maillist, [],
        [], comlist, [], [], comlist1
    ],
                   headers=['Application/Server', 'Validation status'],
                   tablefmt='pipe')
    logger.warn('\n\n' + msg)
def _clone_test_case(j_conn, cloned_ts_obj, a_test_cases, testtype):
    """Clone the given master-suite test cases under a cloned test suite.

    :param j_conn: JIRA connection object.
    :param cloned_ts_obj: cloned test suite jira issue; each cloned test case
        is created with this issue as its ``parent``.
    :param a_test_cases: list of jira test-case issues to clone.
    :param testtype: unused in this function; kept for interface compatibility.
    :returns: None; creates the cloned issues in jira as a side effect.
    """
    # Map human-readable field names to jira custom-field ids.
    fieldmap = {field['name']: field['id'] for field in j_conn.fields()}
    testtype_field = fieldmap.get('Test Type')
    testbuildtype = fieldmap.get('Test Build Type')
    # The field label differs between jira instances ('Automated?' vs
    # 'Automated'); a missing field here is fatal, so no .get().
    try:
        auto_field = fieldmap['Automated?']
    except KeyError:
        auto_field = fieldmap['Automated']
    expected_result = fieldmap.get('Expected Result')
    preconditions = fieldmap.get('Preconditions')
    sutas_id = fieldmap.get('sutas_id')
    # NOTE(review): actual_result is never used below; the lookup is kept
    # because it deliberately raises KeyError when neither field variant
    # exists — confirm before removing.
    try:
        actual_result = fieldmap['Actual result']
    except KeyError:
        actual_result = fieldmap['Actual Result']
    globfilepath = os.path.join(os.path.expanduser('~'), "global_conf.yaml")
    globdata = get_data_from_yaml(globfilepath)
    project = globdata.get('TM_Project')
    fixversion = globdata.get('fix_version')
    logger.warn(
        str(len(a_test_cases)) + ' tests are cloning from master suite id')
    for test_case in a_test_cases:
        # Shallow copy of the raw jira field payload for this test case.
        data2 = dict(test_case.raw['fields'])
        final_fix = []
        if data2['fixVersions']:
            fix_v_list = [f['name'] for f in data2['fixVersions']]
            jra = j_conn.project(project)
            versions = j_conn.project_versions(jra)
            versions = [v.name for v in reversed(versions)]
            if fixversion not in versions:
                # Create the configured fix version on demand; best-effort —
                # fall back to None when it cannot be created.
                try:
                    fix = j_conn.create_version(name=fixversion,
                                                project=project)
                    fixversion = fix.name
                except Exception:  # was a bare except; narrowed, intent kept
                    msg = 'Unable to create version {}'.format(fixversion)
                    logger.warn(msg)
                    fixversion = None
            # The clone carries only the configured fix version, not the
            # master test case's original versions.
            fix_v_list = []
            fix_v_list.append(fixversion)
            for i in fix_v_list:
                final_fix.append({'name': i})
        tc_f = test_case.fields()
        tc_fields = {
            'project': {'key': project},
            'summary': 'CLONE - ' + tc_f.summary,
            'description': tc_f.description,
            'issuetype': {'name': tc_f.issuetype.name},
            auto_field: [{"value": str(data2[auto_field][0]['value'])}],
            'parent': {'id': cloned_ts_obj.id},
        }
        # BUG FIX: this mapping used to be a dict keyed by the (possibly
        # None) custom-field ids.  When several optional fields were missing,
        # their entries all collided on the single allowed None key, silently
        # dropping mappings and warning about at most one missing field.  A
        # list of (field_id, field_name) pairs keeps every entry distinct.
        tc_jira_fields = [
            ('priority', 'priority'),
            ('labels', 'labels'),
            ('fixVersions', 'fixVersions'),
            ('components', 'components'),
            (expected_result, 'expected_result'),
            (preconditions, 'preconditions'),
            (sutas_id, 'sutas_id'),
            (testtype_field, 'testtype_field'),
            (testbuildtype, 'testbuildtype'),
        ]
        for field, fieldname in tc_jira_fields:
            if field:
                tc_fields[field] = data2[field]
            else:
                logger.warn(
                    "Field {} doesn't exist for the jira TestCase {}".
                    format(fieldname, test_case.key))
        # adding environment field
        tc_fields['environment'] = globdata['environment']
        # creating a issue as part of cloning
        new_issue = j_conn.create_issue(fields=tc_fields)
        logger.warn(data2[sutas_id] + '-->' + new_issue.key + ':' +
                    'CLONE - ' + ' '.join(tc_f.summary.split()))
        if data2['fixVersions']:
            if final_fix:
                new_issue.update(fields={"fixVersions": final_fix})
def raise_issue(self, project_name, affects_version, environment, summary,
                description, watcher=None, component=None,
                label=None, priority=None, comment=None,
                issuetype=None, attachments=None):
    """
    Raises issue in jira with all fields provided.

    - **parameters**, **types**, **return** and **return types**::

        :param project_name: Name of the project in which jira issue
                             will be created
        :param affects_version: Build Version in which test suite was run.
        :param environment: Environment in which test suite run like test/dev
        :param summary: Summary of the issue created
        :param description: Description of the issue created.
        :param watcher: Watchers for the issue created; when omitted the
                        project lead is added as watcher.
        :param component: Component name for the issue.
        :param label: Labels could be regression,sanity,smoke,feature,etc.
                      Defaults to ['SUTAS-AUTOMATION'].
        :param priority: priority of the issue created (overridden below by
                         the value from global_conf.yaml).
        :param comment: Comments if any for the issue
        :param issuetype: Type of the issue created (forced to Bug below).
        :param attachments: Log file to be attached for the issue created
        :returns: the created jira issue, or None when severity/priority
                  are blank (ValueError path).
    """
    # BUG FIX: the default used to be the mutable list ['SUTAS-AUTOMATION'],
    # shared across all calls; use a None sentinel instead.
    if label is None:
        label = ['SUTAS-AUTOMATION']
    # NOTE(review): 'Critcical' is kept as-is — it must match the jira
    # Severity option value exactly; confirm before correcting the spelling.
    severitydict = {
        '1': 'S1: Critcical',
        '2': 'S2: Major',
        '3': 'S3: Moderate',
        '4': 'S4: Low',
        '5': 'S5: Trivial'
    }
    globfilepath = os.path.join(os.path.expanduser('~'), "global_conf.yaml")
    globdata = get_data_from_yaml(globfilepath)
    # Severity/priority always come from global config, overriding the
    # 'priority' argument.
    severity = globdata['Jira']['bugseverity']
    priority = globdata['Jira']['bugpriority']
    try:
        if ((severity == " ") and (priority == " ")):
            raise ValueError
        else:
            issuetype = {'name': 'Bug'}
            fieldmap = {
                field['name']: field['id']
                for field in self.conn.fields()
            }
            severity_field = fieldmap['Severity']
            hitcount = fieldmap['HitCount']
            affected_version = [{'name': affects_version}]
            comp = [{'name': component}]
            fields = {
                'project': self.find_project_by_name(project_name),
                'summary': summary,
                'description': description,
                'priority': {
                    'id': priority
                },
                severity_field: {
                    'value': severitydict[severity]
                },
                'versions': affected_version,
                'environment': environment,
                'labels': label,
                'components': comp,
                'issuetype': issuetype
            }
            new_issue = self.conn.create_issue(fields=fields, prefetch=True)
            # A freshly raised bug starts with a hit count of 1.
            new_issue.update(fields={hitcount: 1})
            if 'Ticket Source' in fieldmap:
                new_issue.update(
                    fields={
                        fieldmap['Ticket Source']: {
                            'value': 'Quality Engineering'
                        }
                    })
            logger.warn('\n\nNew issue "%s" created successfully.\n' %
                        new_issue)
            proj_key = self.find_project_key(project_name)
            if watcher:
                users = watcher.split(",")
                try:
                    # Add each requested watcher; failures are per-user and
                    # non-fatal.
                    for user in users:
                        try:
                            self.conn.add_watcher(new_issue, user)
                        except Exception as err:
                            msg = '{} cannot be added as watcher'.format(
                                user)
                            logger.warn(msg)
                    # Always also add the project lead as a watcher.
                    lead_watch = self.conn.project(
                        proj_key).raw["lead"]["displayName"]
                    try:
                        self.conn.add_watcher(new_issue, lead_watch)
                    except Exception as err:
                        msg = '{} cannot be added as lead watcher'.format(
                            lead_watch)
                        logger.warn(msg)
                except KeyError:
                    logger.warn("\nUser not available or No Team lead"
                                "assigned to project.. Hence could not "
                                "add watcher\n")
            else:
                # No explicit watcher: fall back to the project lead.
                try:
                    watcher = self.conn.project(
                        proj_key).raw["lead"]["displayName"]
                    if watcher:
                        try:
                            self.conn.add_watcher(new_issue, watcher)
                        except Exception as err:
                            msg = '{} cannot be added as watcher'.format(
                                watcher)
                            logger.warn(msg)
                except KeyError:
                    logger.warn("\nNo Team Lead assigned to project."
                                " So, unable to add watcher\n")
            if attachments:
                # BUG FIX: 'except IOError' used to come after
                # 'except Exception'; IOError is a subclass of Exception in
                # Python 3, so that handler was unreachable.  The more
                # specific handler now comes first.
                try:
                    self.conn.add_attachment(new_issue, attachments)
                except IOError:
                    disp = '{} file not found'.format(attachments)
                    logger.warn(disp)
                except Exception as err:
                    # BUG FIX: format string had one placeholder but two
                    # arguments; the issue key is now included.
                    msg = 'Not able to add attachments {} to the bug {} raised'.format(
                        attachments, new_issue)
                    logger.warn(msg)
            if comment:
                try:
                    self.conn.add_comment(new_issue, comment)
                except Exception as err:
                    msg = 'Not able to add Comments {} to the bug {} raised'.format(
                        comment, new_issue)
                    logger.warn(msg)
            return new_issue
    except ValueError:
        print("Incorrect argument : Null value not allowed")
def filter_tcs(conn, test_cases, test_type):
    """
    Responsible for filtering the test cases with provided sprint number
    and test type in master test suite.

    :param conn: JIRA connection object (used to resolve custom field ids).
    :param test_cases: list of jira test cases other than not-planned state
        whose 'Automated' field value is yes.
    :param test_type: label expression from config; may be a single label,
        or labels joined by 'and' (all must match), or by 'or'/',' (any
        may match).
    returns filtered test cases.

    NOTE(review): the combinators are detected by SUBSTRING tests
    ('and' in test_type, 'or' in test_type), so a single label that merely
    contains those letter sequences (e.g. 'performance' contains 'or')
    would be treated as a compound expression — confirm the allowed label
    vocabulary avoids this.
    """
    globfilepath = os.path.join(os.path.expanduser('~'), "global_conf.yaml")
    globdata = get_data_from_yaml(globfilepath)
    # Map human-readable field names to jira custom-field ids.
    fieldmap = {field['name']: field['id'] for field in conn.fields()}
    sprint_number = ''
    #test_type = ''
    tbc_tcs = []
    # sprint_number is read from config but not used further in this block.
    sprint_number = globdata.get('sprint_number', None)
    #test_type = globdata.get('test_type', None)
    # Collect the tag lists of every test case in the robot suite; jira
    # test-case keys are expected to appear among these tags.
    robosuitepath = os.environ['robosuitepath']
    suite = TestData(parent=None, source=robosuitepath)
    robotctagids = []
    for testcase in suite.testcase_table:
        robotctagids.append(testcase.tags.value)
    testtype_field = fieldmap.get('Test Type')
    testbuildtype_field = fieldmap.get('Test Build Type')
    sprint_field = fieldmap.get('Sprint')
    # Phase 1: select jira test cases whose 'Test Type'/'Test Build Type'
    # values satisfy the test_type expression.
    for test_case in test_cases:
        if test_type:
            #jira_labels = [val.lower().strip() for val in test_case.fields().labels]
            jira_labels = []
            testtype_fields = test_case.raw['fields'].get(
                testtype_field, None)
            testbuildtype_fields = test_case.raw['fields'].get(
                testbuildtype_field, None)
            if testtype_field in test_case.raw['fields']:
                jira_labels = ([
                    val['value'].strip() for val in testtype_fields
                ] if testtype_fields is not None else [])
            if testbuildtype_field in test_case.raw[
                    'fields'] and testbuildtype_fields:
                jira_labels.append(testbuildtype_fields['value'])
            if jira_labels:
                if 'and' in test_type:
                    # 'and' expression: every label must be present.
                    labels = [
                        val.strip() for val in test_type.split('and')
                    ]
                    count = 0
                    for jira_label in jira_labels:
                        if jira_label in labels:
                            count += 1
                    if count == len(labels):
                        tbc_tcs.append(test_case)
                elif (',' in test_type or 'or' in test_type):
                    # ','/'or' expression: any single label is enough.
                    if ',' in test_type:
                        labels = [
                            val.strip() for val in test_type.split(',')
                        ]
                    else:
                        labels = [
                            val.strip() for val in test_type.split('or')
                        ]
                    for jira_label in jira_labels:
                        if jira_label in labels:
                            tbc_tcs.append(test_case)
                            break
                elif jira_labels and test_type.strip() in jira_labels:
                    # Single-label expression: exact membership.
                    tbc_tcs.append(test_case)
            else:
                logger.warn(
                    "Test types are not defined for the testcase {} in jira.Please Mention them."
                    .format(test_case))
    if tbc_tcs:
        # Deduplicate (a case can match via both testtype and buildtype).
        tbc_tcs = list(set(tbc_tcs))
    else:
        if test_type:
            # No jira test case matched: fail loudly with a message shaped
            # after the expression kind.
            if 'and' in test_type:
                test_types = ','.join(test_type.split('and'))
                raise Exception(
                    'Master suite doesnot contain testcases with mentioned labels {} together'
                    .format(test_types))
            if 'or' in test_type:
                test_types = ','.join(test_type.split('or'))
                raise Exception(
                    'Master suite doesnot contain testcases with any of mentioned labels {}'
                    .format(test_types))
            if ',' in test_type:
                test_types = ','.join(test_type.split(','))
                raise Exception(
                    'Master suite doesnot contain testcases with any of mentioned labels {}'
                    .format(test_types))
            else:
                raise Exception(
                    'Master suite doesnot contain testcases with the mentioned label {}'
                    .format(test_type))
        else:
            # No filter requested: keep everything.
            tbc_tcs = test_cases
    # Phase 2: intersect the jira selection with the robot suite tags —
    # keep only cases whose key appears in some robot test's tag list and
    # whose tags also satisfy the test_type expression.
    filteredtcids = []
    for robotctagid in robotctagids:
        if not robotctagid:
            continue
        robotctagid = [tag for tag in robotctagid]
        # NOTE(review): count is reset per robot tag list but accumulates
        # across all tbc_tcs inside it — with several matching cases in one
        # tag list the 'and' threshold below could be met by mixed
        # contributions; confirm this is intended.
        count = 0
        for tbc_tc in tbc_tcs:
            if tbc_tc.key in robotctagid:
                if test_type:
                    if 'and' in test_type:
                        test_types = test_type.split('and')
                        for t_type in test_types:
                            if t_type in robotctagid:
                                count += 1
                        if count == len(test_types):
                            filteredtcids.append(tbc_tc)
                    if 'or' in test_type or ',' in test_type:
                        if 'or' in test_type:
                            test_types = test_type.split('or')
                        if ',' in test_type:
                            test_types = test_type.split(',')
                        for t_type in test_types:
                            if t_type in robotctagid:
                                filteredtcids.append(tbc_tc)
                    else:
                        if test_type in robotctagid:
                            filteredtcids.append(tbc_tc)
                else:
                    # No filter: the jira-side selection is the result.
                    return tbc_tcs
    return filteredtcids