def delete_support_bundles(self, node_name="master"):
    """Delete every support bundle present on the given controller.

    Inputs:
    | node_name | controller alias as defined in the .topo file (default 'master') |

    Return Value:
    - True when no bundles remain afterwards; otherwise flags a test failure.
    """
    t = test.Test()
    node = t.controller(node_name)
    bundles = self.get_support_bundles()
    helpers.prettify(bundles)
    if not bundles:
        helpers.log("No Support Bundles on controller %s" % node_name)
        return True
    # Delete each bundle individually via the REST API.
    for bundle in bundles:
        helpers.log("Deleting Support Bundle Name: %s" % bundle['name'])
        delete_url = ('/api/v1/data/controller/support/bundle[name="%s"]'
                      % bundle['name'])
        node.rest.delete(delete_url, {})
        helpers.log("Success Deleting Support Bundle: %s" % bundle['name'])
    # Re-query to confirm the controller is now empty of bundles.
    helpers.log(
        "Checking again to check all the support bundles are deleted..")
    bundles = self.get_support_bundles()
    helpers.prettify(bundles)
    if not bundles:
        helpers.log("No Support Bundles on controller %s" % node_name)
        return True
    helpers.test_failure("Unable to Delete all Support bundles..")
def test_args(self, arg1, arg2=222, arg3=333, arg4=444, arg5=None,
              arg6=True, arg7=False, arg8=890, arg9=None, arg10=None,
              arg11=None, arg12=None):
    # Debug/demo helper: shows the contents of locals() for a call with
    # many default arguments, and demonstrates that assigning through
    # locals() does NOT rebind an actual local variable in CPython.
    helpers.log("locals(): %s" % helpers.prettify(locals()))
    local_var = 100
    print("local_var (1): %s" % local_var)
    # Writing into the locals() dict has no effect on the real binding;
    # the next print still shows 100.
    locals()['local_var'] = 123
    print("local_var (2): %s" % local_var)
    #arg1 = 8000
    #print("arg1 (1): %s" % arg1)
    #locals()['arg1'] = 123
    #print("arg1 (2): %s" % arg1)
    helpers.log("locals(): %s" % helpers.prettify(locals()))
def task_finish_check_parallel(self, results, result_dict, timer=60,
                               timeout=1500):
    """Poll parallel task results until all complete or the timeout expires.

    Input:
        results     - list of async task results (each has task_id/ready/get)
        result_dict - per-task metadata dict keyed by task_id; each entry's
                      "result" field is filled in with the task output
        timer       - seconds to sleep between polls (default 60)
        timeout     - overall wait budget in seconds (default 1500)
    Output:
        True when every task finished and none returned False; False on
        timeout or when any task output is False.
    Author: Mingtao

    Bug fix: the original also called res.get() in the NOT-READY branch.
    AsyncResult.get() blocks until the task completes, which defeated the
    polling loop and the timeout. Results are now fetched only once ready.
    """
    helpers.log("***Entering==> task_finish_check_parallel \n")
    is_pending = True
    iteration = 0
    flag = True
    while is_pending:
        is_pending = False
        iteration += 1
        helpers.sleep(int(timer))
        helpers.log("USR INFO: result is %s" % results)
        for res in results:
            task_id = res.task_id
            action = (result_dict[task_id]["node"] + ' '
                      + result_dict[task_id]["action"])
            if res.ready():
                helpers.log("****** %d.READY - task_id(%s)['%s']"
                            % (iteration, res.task_id, action))
                output = res.get()
                helpers.log("Output after it is ready is %s" % output)
            else:
                helpers.log("****** %d.NOT-READY - task_id(%s)['%s']"
                            % (iteration, res.task_id, action))
                is_pending = True
                # Do NOT call res.get() here: it blocks until the task
                # completes and would make the timeout below meaningless.
        if iteration >= int(timeout) / int(timer):
            # helpers.test_failure("USR ERROR: the parallel execution did not finish with %s seconds" %timeout)
            helpers.log("USR ERROR: the parallel execution did not finish with %s seconds" % timeout)
            return False
    helpers.log("*** Parallel tasks completed ")
    #
    # Check task output
    #
    for res in results:
        helpers.log("Inside for res value is %s" % res)
        task_id = res.task_id
        helpers.log_task_output(task_id)
        helpers.log("Inside for res task id is %s" % task_id)
        output = res.get()
        helpers.log("USER INFO: for task %s , result is %s " % (task_id, output))
        result_dict[task_id]["result"] = output
        if output is False:
            flag = False
    helpers.log("***** result_dict:\n%s" % helpers.prettify(result_dict))
    helpers.log("USER INFO ***** result flag is: %s" % flag)
    return flag
def fetch_port_stats(self, **kwargs):
    """Fetch Ixia port statistics from a traffic generator.

    Inputs:
    | node | traffic generator alias in the .topo file (default 'tg1') |
    | **kwargs | remaining options passed through to ix_fetch_port_stats |

    Return Value:
    - Result dictionary from the traffic generator handle.
    """
    t = test.Test()
    # 'node' selects the traffic generator; pop() removes it so only the
    # Ixia-specific options are forwarded (idiomatic replacement for the
    # 'in'/del dance).
    node = kwargs.pop('node', 'tg1')
    tg_handle = t.traffic_generator(node).handle()
    result = tg_handle.ix_fetch_port_stats(**kwargs)
    helpers.log('result:\n%s' % helpers.prettify(result))
    return result
def bash_ls(self, node, path):
    """Run 'ls -l --time-style=+%Y-%m-%d <path>' on a device and parse it.

    Inputs:
    | node | reference to switch/controller/host as defined in .topo file |
    | path | directory (or file) to get listing for |

    Example:
    - bash ls master /home/admin
    - bash ls h1 /etc/passwd

    Return Value:
    - Dictionary keyed by file name. Each value is a list of fields:
      fields[0]=mode, fields[1]=link count, fields[2]=user,
      fields[3]=group, fields[4]=size, fields[5]=date
    """
    t = test.Test()
    dev = t.node(node)
    # %% escapes survive the string formatting; the device sees
    # 'ls -l --time-style=+%Y-%m-%d <path>'.
    content = dev.bash('ls -l --time-style=+%%Y-%%m-%%d %s' % path)['content']
    listing = helpers.strip_cli_output(content, to_list=True)

    # Sample output:
    #   total 691740
    #   -rw-r--r-- 1 root root 708335092 2014-03-03 controller-upgrade-bvs-2.0.5-SNAPSHOT.pkg
    #   -rw-r--r-- 1 bsn bsn 0 2014-03-12 blah blah.txt
    # Skip the leading 'total <nnnnn>' summary line.
    files = {}
    for entry in listing[1:]:
        fields = entry.split()
        helpers.log("fields: %s" % fields)
        # fields[6:] is the file name, which may itself contain spaces.
        name = ' '.join(fields[6:])
        # For symlinks drop the ' -> target' part, keeping just the name.
        name = re.sub('-> .*$', '', name)
        files[name] = fields[:6]
    helpers.log("files:\n%s" % helpers.prettify(files))
    return files
def send_mail(): args = prog_args() s = SendMail( sender=args.sender, receiver=args.receiver, subject=args.subject, ) response, _ = s.send(message=args.message, infile=args.infile) if int(response['status']) == 201: print "Message sent successfully." sys.exit(0) else: print "Send mail error.\n%s" % helpers.prettify(response) sys.exit(1)
def cli_get_links_nodes(self, node1, node2):
    """Collect link information between two nodes from 'show link' output.

    Returns a nested dict: linkinfo[node][interface] ->
    {'name': ..., 'nbr': ..., 'nbr-intf': ...} for both link directions.
    """
    helpers.test_log("Entering ==> cli_get_links_nodes: %s - %s" % (node1, node2))
    t = test.Test()
    c = t.controller('master')
    content = c.cli('show link | grep ' + node1 + ' | grep ' + node2)['content']
    lines = helpers.strip_cli_output(content, to_list=True)
    helpers.log("INFO: *** output *** \n %s" % lines)
    linkinfo = {node1: {}, node2: {}}
    for raw in lines:
        fields = raw.lstrip().split()
        helpers.log("fields: %s" % fields)
        end_a, end_b = fields[1], fields[3]
        # Interface name is whatever follows the last '-' in the port field.
        intf_a = re.match(r'.*-(.*)', fields[2]).group(1)
        intf_b = re.match(r'.*-(.*)', fields[4]).group(1)
        linkinfo[end_a][intf_a] = {'name': intf_a,
                                   'nbr': end_b,
                                   'nbr-intf': intf_b}
        linkinfo[end_b][intf_b] = {'name': intf_b,
                                   'nbr': end_a,
                                   'nbr-intf': intf_a}
    helpers.log("INFO: *** link info *** \n %s" % helpers.prettify(linkinfo))
    return linkinfo
#!/usr/bin/env python # Given the aggregated build (BUILD_NAME argument), print the list of actual builds. import os import sys # Determine BigRobot path(s) based on this executable (which resides in # the bin/ directory. bigrobot_path = os.path.dirname(__file__) + '/..' exscript_path = bigrobot_path + '/vendors/exscript/src' sys.path.insert(0, bigrobot_path) sys.path.insert(1, exscript_path) import autobot.helpers as helpers from catalog_modules.test_catalog import TestCatalog if not 'BUILD_NAME' in os.environ: helpers.error_exit("Environment variable BUILD_NAME is not defined.", 1) build_name = os.environ['BUILD_NAME'] db = TestCatalog() build_names = db.aggregated_build(build_name) print "%s" % helpers.prettify([helpers.unicode_to_ascii(x) for x in build_names])
        help=("Jenkins build string,"
              " e.g., 'bvs master #2007'"))
    parser.add_argument(
        '--regression-tags',
        help=("Supported regression tags are 'daily' or 'full'."))
    _args = parser.parse_args()

    # _args.build <=> env BUILD_NAME
    # Fall back to the BUILD_NAME environment variable when --build is not
    # given; otherwise export the --build value so child processes see it.
    if not _args.build and 'BUILD_NAME' in os.environ:
        _args.build = os.environ['BUILD_NAME']
    elif not _args.build:
        helpers.error_exit("Must specify --build option or set environment"
                           " variable BUILD_NAME")
    else:
        os.environ['BUILD_NAME'] = _args.build
    return _args


if __name__ == '__main__':
    # Record the build (and optional regression tags) in the test catalog,
    # then record the build-name group.
    args = prog_args()
    db = TestCatalog()
    doc = db.find_and_add_build_name(args.build,
                                     regression_tags=args.regression_tags,
                                     quiet=not args.verbose)
    if args.verbose:
        print "Doc (before update): %s" % helpers.prettify(doc)
    doc = db.find_and_add_build_name_group(args.build, quiet=not args.verbose)
    if args.verbose:
        print "Doc (before update): %s" % helpers.prettify(doc)
def populate_db_with_verification_data(self): test_cases = helpers.from_yaml( helpers.file_read_once(self._verification_file)) print "Updating documents in build '%s'" % self.aggregated_build_name() if test_cases == None: # No test cases found return True for tc in test_cases: self.sanitize_test_case_data(tc) query = { "name": tc['name'], "product_suite": tc['product_suite'], "build_name": self.aggregated_build_name(), } aggr_cursor = self.catalog().find_test_cases_archive(query) count = aggr_cursor.count() if count == 0: cat_helpers.warn("Cannot find document matching below query." " No update made.\n%s" % helpers.prettify(query)) else: if count != 1: cat_helpers.warn("Expecting only one aggregated test case," " but result is '%s'" % count) doc = dict(aggr_cursor[0]) # create a copy of dictionary if 'build_name_list' in doc: if doc['build_name_list'][-1] != tc['build_name_verified']: doc['build_name_list'] = (doc['build_name_list'] + [tc['build_name_verified']]) else: doc['build_name_list'] = [tc['build_name_verified']] doc['status'] = tc['status'] doc['build_name_verified'] = tc['build_name_verified'] doc['build_name_orig'] = self.aggregated_build_name() doc['jira'] = tc['jira'] doc['notes'] = tc['notes'] doc['status'] = tc['status'] doc['createtime'] = helpers.ts_long_local() doc['starttime'] = doc['starttime_datestamp'] = None doc['endtime'] = doc['endtime_datestamp'] = None doc['build_number'] = None # print("Updating document:\n%s" % helpers.prettify(doc)) print("Updating suite '%s', test case '%s'" % (doc['product_suite'], doc['name'])) new_doc = self.catalog().upsert_doc('test_cases_archive', doc, query) if new_doc == None: cat_helpers.warn( "Cannot find document. Upsert failed" " for: %s, name:'%s', product_suite:'%s'\n" % (doc, tc['name'], tc['product_suite'])) else: # print("\n new_doc: %s\n" % new_doc) pass
              " e.g., 'bvs master #2007'"))
    parser.add_argument('--aggregated-build-name',
                        help=("Aggregated build name,"
                              " e.g., 'bvs master bcf-2.0.0 aggregated'"))
    _args = parser.parse_args()

    # _args.build <=> env BUILD_NAME
    # Fall back to env BUILD_NAME when --build is omitted; otherwise export
    # the --build value to the environment for child processes.
    if not _args.build and 'BUILD_NAME' in os.environ:
        _args.build = os.environ['BUILD_NAME']
    elif not _args.build:
        helpers.error_exit("Must specify --build option or set environment"
                           " variable BUILD_NAME")
    else:
        os.environ['BUILD_NAME'] = _args.build
    return _args


if __name__ == '__main__':
    # Register the aggregated build in the catalog and print its canonical
    # build name.
    args = prog_args()
    db = TestCatalog()
    doc = db.find_and_add_aggregated_build(
        args.build,
        aggregated_build_name=args.aggregated_build_name,
        quiet=not args.verbose)
    if args.verbose:
        print "Doc: %s" % helpers.prettify(doc)
    print "%s" % doc["build_name"]
    sys.exit(0)
def task_finish_check_parallel(self, results, result_dict, timer=60,
                               timeout=1500):
    """Poll parallel ESB task results until all finish or the timeout expires.

    Input:
        results     - list of async task results
        result_dict - per-task metadata keyed by task_id; "result" is filled in
        timer       - seconds between polls
        timeout     - overall wait budget in seconds
    Output:
        True when every task finished and none returned False, else False.
    Author: Mingtao
    """
    helpers.log("***Entering==> task_finish_check_parallel (timer:%s, timeout:%s)\n"
                % (timer, timeout))
    max_polls = int(timeout) / int(timer)
    poll = 0
    all_ok = True
    pending = True
    while pending:
        pending = False
        poll += 1
        helpers.sleep(int(timer))
        helpers.log("USR INFO: Iteration=%s." % (poll))
        for res in results:
            info = result_dict[res.task_id]
            action = info["node"] + ' ' + info["action"]
            if res.ready():
                helpers.log("****** %d.READY - task_id(%s)['%s']"
                            % (poll, res.task_id, action))
            else:
                helpers.log("****** %d.NOT-READY - task_id(%s)['%s']"
                            % (poll, res.task_id, action))
                pending = True
        if poll >= max_polls:
            # helpers.test_failure("USR ERROR: the parallel execution did not finish with %s seconds" %timeout)
            helpers.log("USR ERROR: the parallel execution did not finish with %s seconds"
                        % timeout)
            break
    helpers.log("*** Parallel tasks completed ")

    # Archive the ESB task logs.
    for res in results:
        helpers.log_task_output(res.task_id)
        helpers.log(" status: %s" % (res.status))
        if res.status == "FAILURE":
            helpers.log(" Task traceback:\n%s" % res.traceback)
        helpers.log("-------------")

    # Timed out with tasks still pending: report overall failure.
    if pending:
        helpers.log("Not able to run ESB tasks successfully.")
        return False

    #
    # Check task output
    #
    for res in results:
        output = res.get()
        helpers.log("USER INFO: for task %s , result is %s " % (res.task_id, output))
        result_dict[res.task_id]["result"] = output
        if output is False:
            all_ok = False
            break
    helpers.log("***** result_dict:\n%s" % helpers.prettify(result_dict))
    helpers.log("USER INFO ***** result flag is: %s" % all_ok)
    return all_ok