def test_get(self):
    """Verify RestClient.get() returns mocked content and surfaces timeouts."""
    with requests_mock.Mocker() as mocked_http:
        # Happy path: the mocked endpoint returns the canned text payload.
        mocked_http.register_uri('GET', MOCK_URL, text=MOCK_TEXT)
        client = rest_client.RestClient(MOCK_URL)
        reply = client.get()
        decoded = reply.content.decode(reply.encoding)
        self.assertEqual(decoded, MOCK_TEXT)

        # Failure path: a connect timeout must propagate as ConnectionError.
        mocked_http.register_uri('GET', MOCK_URL,
                                 exc=requests.exceptions.ConnectTimeout)
        client = rest_client.RestClient(MOCK_URL)
        with self.assertRaises(requests.exceptions.ConnectionError):
            client.get()
def capture_node_info(args):
    """Print a one-shot diagnostic report for a single ODL node.

    Pulls cluster/datastore metrics via the node's Jolokia REST endpoint and
    gathers OS-level stats through helper functions, printing each section
    between banner lines on stdout.

    Args:
        args: parsed CLI namespace; reads .ip, .port, .user and .pw.
            # assumes the Jolokia agent listens on http://ip:port — TODO confirm
    """
    url = ("http://{}:{}/jolokia/read".format(args.ip, args.port))
    # Short 2-second timeout: this is an interactive diagnostic; fail fast
    # rather than hang on an unresponsive node.
    odl_client = rest_client.RestClient(username=args.user, password=args.pw,
                                        url=url, timeout=2)
    # Jolokia-backed sections (each helper prints its own output).
    print("--------------------------------------- Getting SyncStatus ---------------------------------------------\n")
    get_cluster_sync_status(odl_client)
    print("--------------------------------------------------------------------------------------------------------\n")
    print("--------------------------------------- Datastore Status -----------------------------------------------\n")
    get_datastore_stats(odl_client)
    print("--------------------------------------------------------------------------------------------------------\n")
    print("----------------------------------- DOMDataBroker CommitStats Details ----------------------------------\n")
    get_dombroker_commit_stats(odl_client)
    print("--------------------------------------------------------------------------------------------------------\n")
    print("----------------------------------- DISTRIBUTED DATASTORE COMMIT RATE ----------------------------------\n")
    get_datastore_commit_rate(odl_client)
    print("--------------------------------------------------------------------------------------------------------\n")
    # OS-level sections (no REST client needed).
    print("----------------------------------- NETSTAT DETAILS FOR PORT 2550 --------------------------------------\n")
    get_netstat_details_for_akka_port()
    print("--------------------------------------------------------------------------------------------------------\n")
    print("----------------- CPU AND MEMORY UTILIZATION OF THE KARAF PROCESS GATHERED BY 'TOP' --------------------\n")
    get_cpu_and_memory_utilization()
    print("--------------------------------------------------------------------------------------------------------\n")
    print("----------------------------------- FREE AND USED MEMORY IN THE SYSTEM ---------------------------------\n")
    get_free_and_used_memory()
    print("--------------------------------------------------------------------------------------------------------\n")
    print("----------------------------------- NODE HEALTH CHECK STATUS -------------------------------------------\n")
    # Unlike the other helpers, this one RETURNS its status, so print it here.
    print(get_node_health_check_status(odl_client))
    print("--------------------------------------------------------------------------------------------------------\n")
    print("-------------------------------------- lsof of KARAF Process -------------------------------------------\n")
    get_karaf_lsof()
    print("--------------------------------------------------------------------------------------------------------\n")
def test_get_exceptions_txt_file(self):
    """Fetching the gzipped exceptions log returns its decoded text."""
    expected_text = files.read(self.excepts_log_path)
    log_path = "{}/1/odl_1_exceptions.txt.gz".format(JOBNAME)
    full_url = "{}/{}".format(LOGURL, log_path)
    client = rest_client.RestClient(LOGURL)
    with requests_mock.Mocker() as mocked_http:
        mocked_http.register_uri('GET', full_url, text=expected_text)
        fetched = reports.get_log_file(client, log_path)
    # The exceptions report begins with a '=====' banner line.
    self.assertEqual(fetched[:5], "=====")
def test_get_console_log_file(self):
    """Fetching the gzipped console log returns its decoded text."""
    expected_text = files.read(self.console_log_path)
    log_path = "{}/1/console.log.gz".format(JOBNAME)
    full_url = "{}/{}".format(LOGURL, log_path)
    client = rest_client.RestClient(LOGURL)
    with requests_mock.Mocker() as mocked_http:
        mocked_http.register_uri('GET', full_url, text=expected_text)
        fetched = reports.get_log_file(client, log_path)
    # The console log begins with the 'pybot' invocation line.
    self.assertEqual(fetched[:5], "pybot")
def init_rest_client(args, timeout=5):
    """Return the module-level RestClient singleton, creating it on first use.

    Args:
        args: parsed CLI namespace; reads .user and .pw, and is passed to
            make_url_parts() to derive the base URL.
        timeout: request timeout in seconds for a newly created client
            (ignored when the client already exists).
    """
    global odl_client
    # Guard clause: reuse the existing client if one was already built.
    if odl_client is not None:
        return odl_client
    root_url, _path = make_url_parts(args, None)
    odl_client = rest_client.RestClient(username=args.user,
                                        password=args.pw,
                                        url=root_url,
                                        timeout=timeout)
    return odl_client
def __init__(self, url, jobname):
    """Set up job bookkeeping and compile the combined log-parsing regex.

    Args:
        url: base URL of the log server.
        jobname: CI job name whose runs will be scanned.
    """
    self.url = url
    self.jobname = jobname
    self.jobs = {}
    self.first_job = 0
    self.last_job = 0
    # (connect, read) timeouts in seconds for log downloads.
    self.restclient = rest_client.RestClient(url, timeout=(5, 15))
    self.reports = collections.OrderedDict()
    # Test-start lines:
    #   group(1): suite name
    #   group(2): everything after the first '.' (second suite + test, or rest)
    #   group(3): second suite name when a test name follows
    #   group(4): test name
    test_pattern = r'^Starting test: (.*?)\.((.*?)\.(.*)|.*)'
    # Exception lines:
    #   group(5): matched (known) exception text
    #   group(6): marker for a brand-new exception
    exception_pattern = r'^Exception was matched to: (.*)|(Exception is new)'
    # One alternation handles both line kinds in a single pass.
    self.re_tests = re.compile('|'.join((test_pattern, exception_pattern)))
def __init__(self, url, jobname):
    """Set up job/failure bookkeeping and compile the report-parsing regexes.

    Args:
        url: base URL of the log server.
        jobname: CI job name whose runs will be scanned.
    """
    self.url = url
    self.jobname = jobname
    self.jobs = {}
    self.first_job = 0
    self.last_job = 0
    self.failed_jobs = []
    self.failed_devstack = 0
    self.failed_stack = 0
    # (connect, read) timeouts in seconds for log downloads.
    self.restclient = rest_client.RestClient(url, timeout=(5, 15))
    self.reports = collections.OrderedDict()
    self.reports["jobname"] = jobname
    # group(1): suite name from 'NN... :: ...' lines
    # group(2): test name, group(3): PASS or FAIL verdict
    self.re_tests = re.compile(
        r'^([0-9]{2}.*) :: .*$|(^.*) \| (PASS|FAIL) \|$')
    # Elapsed/stacking durations (h:mm:ss each).
    time_re = '^Total elapsed time: ([0-9]:[0-9]{2}:[0-9]{2}), stacking time: ([0-9]:[0-9]{2}:[0-9]{2})$'
    # Per-node stacking-failure lines (node index + dotted-quad IP).
    stack_re = '^node ([0-2]) [0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}: stacking has failed$'
    self.re_time_stack = re.compile('({}|{})'.format(time_re, stack_re))
def cluster_monitor(stdscr, controllers, username, password, data_store,
                    excluded_shards=None):
    """Continuously render a curses grid of shard roles across controllers.

    Polls each controller's Jolokia ShardManager MBean to discover member
    names and local shards, then displays one column per shard and one row
    per controller, refreshing every poll cycle until 'q'/'Q' is pressed.

    Args:
        stdscr: curses standard screen (as provided by curses.wrapper).
        controllers: list of dicts with at least 'ip' and 'port' keys;
            'client' and 'name' entries are filled in here.
        username: HTTP auth user for the Jolokia endpoint.
        password: HTTP auth password.
        data_store: datastore flavor, e.g. 'Config' or 'Operational'
            (used both lowercased and as-is in the MBean name).
        excluded_shards: optional collection of shard base names to hide;
            defaults to no exclusions.
    """
    # BUG FIX: the membership tests below ('... not in excluded_shards')
    # raised TypeError when the caller omitted the argument (None default);
    # normalize to an empty set so "no exclusions" just works.
    if excluded_shards is None:
        excluded_shards = set()
    shards = set()
    (maxy, maxx) = stdscr.getmaxyx()
    controller_len = constants.MAX_CONTROLLER_NAME_LEN
    field_len = 0
    stdscr.addstr(len(controllers) + 3, 0, 'Polling controllers, please wait...',
                  curses.color_pair(constants.WHITE_ON_BLACK))
    stdscr.addstr(len(controllers) + 4, 0, 'Press q or use ctrl + c to quit.',
                  curses.color_pair(constants.WHITE_ON_BLACK))
    stdscr.refresh()
    # create rest clients for each controller (built once, reused per poll)
    for controller in controllers:
        url = ("http://{}:{}/jolokia/read".format(controller['ip'],
                                                  controller['port']))
        controller['client'] = rest_client.RestClient(
            username=username, password=password, url=url, timeout=2)
    key = -1
    while key != ord('q') and key != ord('Q'):
        (maxy, maxx) = handle_window_resize(stdscr, maxy, maxx)
        key = max(key, stdscr.getch())
        # Retrieve controller names and shard names.
        for controller in controllers:
            key = max(key, stdscr.getch())
            if key == ord('q') or key == ord('Q'):
                break
            url_path = ("org.opendaylight.controller:Category"
                        "=ShardManager,name=shard-manager-{},type=Distributed"
                        "{}Datastore".format(data_store.lower(), data_store))
            odl_client = controller['client']
            try:
                data = odl_client.request('get', url_path).json()
            except (exceptions.ConnectionError, exceptions.ReadTimeout,
                    ValueError):
                # Unreachable node or non-JSON reply: fall through with no data.
                data = None
            # grab the controller name from the first shard
            try:
                controller['name'] = data['value']['MemberName']
            except (KeyError, TypeError):
                # No member name available; fall back to ip:port label.
                controller['name'] = "{}:{}".format(
                    controller['ip'], controller['port'])
            # collect shards found in any controller; does not require
            # all controllers to have the same shards
            if data and 'value' in data and 'LocalShards' in data['value']:
                for local_shard in data['value']['LocalShards']:
                    match = "^.*?-shard-(.+?)-{}$".format(data_store.lower())
                    m = re.search(match, local_shard)
                    if m and m.group(1) not in excluded_shards:
                        shards.add(m.group(1))
            controller_len = max(controller_len, len(controller['name']))
        if shards:
            # Widest shard name plus padding sets the column width.
            field_len = max(map(len, shards)) + 2
        else:
            field_len = max(field_len, 0)
        # Ensure everything fits: shrink both widths evenly if the grid
        # would overflow the window.
        if controller_len + 1 + (field_len + 1) * len(shards) > maxx:
            extra = controller_len + 1 + (field_len + 1) * len(shards) - maxx
            delta = int(math.ceil(float(extra) / (1 + len(shards))))
            controller_len -= delta
            field_len -= delta
        # No shards found: show the 'default' placeholder column. Otherwise
        # sort the shard names so column order is stable between refreshes
        # (plain set iteration order is arbitrary and made columns jump).
        if not shards:
            real_shards = ['default']
        else:
            real_shards = sorted(shards)
        stdscr.move(0, 2)
        stdscr.clrtoeol()
        for data_column, shard in enumerate(real_shards):
            stdscr.addstr(1, controller_len + 1 + (field_len + 1) * data_column,
                          shard.center(field_len),
                          curses.color_pair(constants.WHITE_ON_BLACK))
        # display controller and shard headers
        for row, controller in enumerate(controllers):
            addr = "{}:{}".format(controller['ip'], controller['port'])
            stdscr.addstr(row + 2, 0, addr.center(controller_len),
                          curses.color_pair(constants.WHITE_ON_BLACK))
        stdscr.addstr(0, 0, 'Controller'.center(controller_len),
                      curses.color_pair(constants.WHITE_ON_BLACK))
        stdscr.addstr(0, max(controller_len + 1, 10),
                      'Shards/Status'.center(field_len),
                      curses.color_pair(constants.WHITE_ON_BLACK))
        stdscr.refresh()
        # display shard status
        for data_column, shard_name in enumerate(real_shards):
            key = max(key, stdscr.getch())
            if key == ord('q') or key == ord('Q'):
                break
            if shard_name not in excluded_shards:
                cluster_stat = get_cluster_roles(shard_name, controllers,
                                                 data_store)
                for row, controller in enumerate(controllers):
                    status = size_and_color(cluster_stat, field_len,
                                            controller["ip"])
                    stdscr.addstr(
                        row + 2,
                        controller_len + 1 + (field_len + 1) * data_column,
                        status['txt'], status['color'])
        time.sleep(0.5)
        stdscr.refresh()