def print_config(self, lm_config):
    """Render the stored Link Monitor parameters as tables."""
    summary = [
        ['isOverloaded: {}'.format('Yes' if lm_config.isOverloaded else 'No')],
        ['nodeLabel: {}'.format(lm_config.nodeLabel)],
        ['overloadedLinks: {}'.format(', '.join(lm_config.overloadedLinks))],
    ]
    print(printing.render_vertical_table(
        summary, caption='Link Monitor parameters stored'))

    # Per-interface link metric overrides
    print(printing.render_vertical_table([['linkMetricOverrides:']]))
    iface_rows = [
        [iface, metric]
        for iface, metric in sorted(lm_config.linkMetricOverrides.items())
    ]
    print(printing.render_horizontal_table(
        iface_rows, column_labels=['Interface', 'Metric Override']))

    # Per-adjacency metric overrides (key carries nodeName + ifName)
    print(printing.render_vertical_table([['adjMetricOverrides:']]))
    adj_rows = [
        [adj_key.nodeName + ' ' + adj_key.ifName, metric]
        for adj_key, metric in sorted(lm_config.adjMetricOverrides.items())
    ]
    print(printing.render_horizontal_table(
        adj_rows, column_labels=['Adjacency', 'Metric Override']))
def print_db_delta_prefix(self, kvstore_keyvals, kvstore_prefix_node_names,
                          decision_prefix_dbs, json):
    """Compare per-node prefix DBs between KvStore and Decision.

    Returns status code. 0 = success, 1 = failure

    NOTE(review): the ``json`` parameter is accepted but never used here
    (cf. print_db_delta_adj, which honors it) — confirm whether JSON
    output was intended for this path.
    """
    prefix_maps = utils.collate_prefix_keys(kvstore_keyvals)
    for node_name, prefix_db in prefix_maps.items():
        kvstore_prefix_node_names.add(node_name)
        if node_name not in decision_prefix_dbs:
            print(printing.render_vertical_table([[
                "node {}'s prefix db is missing in Decision".format(node_name)
            ]]))
            return 1
        # Build Decision's global prefix view for this node, then diff it
        # against the KvStore copy.
        decision_prefix_set = {}
        utils.update_global_prefix_db(
            decision_prefix_set, decision_prefix_dbs[node_name])
        delta_lines = utils.sprint_prefixes_db_delta(
            decision_prefix_set, prefix_db)
        if delta_lines:
            print(printing.render_vertical_table([[
                "node {}'s prefix db in Decision out of sync with "
                "KvStore's".format(node_name)
            ]]))
            print("\n".join(delta_lines))
            return 1
    return 0
def print_config(self, lm_config: lm_types.LinkMonitorConfig):
    """Display the stored Link Monitor configuration as tables."""
    summary_rows = [
        ["isOverloaded: {}".format("Yes" if lm_config.isOverloaded else "No")],
        ["nodeLabel: {}".format(lm_config.nodeLabel)],
        ["overloadedLinks: {}".format(", ".join(lm_config.overloadedLinks))],
    ]
    print(printing.render_vertical_table(
        summary_rows, caption="Link Monitor parameters stored"))

    # Interface-level metric overrides
    print(printing.render_vertical_table([["linkMetricOverrides:"]]))
    iface_rows = [
        [iface, metric]
        for iface, metric in sorted(lm_config.linkMetricOverrides.items())
    ]
    print(printing.render_horizontal_table(
        iface_rows, column_labels=["Interface", "Metric Override"]))

    # Adjacency-level metric overrides (key carries nodeName + ifName)
    print(printing.render_vertical_table([["adjMetricOverrides:"]]))
    adj_rows = [
        [adj_key.nodeName + " " + adj_key.ifName, metric]
        for adj_key, metric in sorted(lm_config.adjMetricOverrides.items())
    ]
    print(printing.render_horizontal_table(
        adj_rows, column_labels=["Adjacency", "Metric Override"]))
def print_db_delta(self, key, value, kvstore_adj_node_names,
                   kvstore_prefix_node_names, decision_adj_dbs,
                   decision_prefix_dbs):
    """Compare one KvStore publication entry against Decision's view.

    Dispatches on the key prefix: adjacency-DB keys and prefix-DB keys are
    deserialized and diffed against the corresponding Decision database,
    printing any mismatch. The node names seen are recorded into
    ``kvstore_adj_node_names`` / ``kvstore_prefix_node_names`` (mutated in
    place). Keys with any other prefix are ignored.
    """
    if key.startswith(Consts.ADJ_DB_MARKER):
        # Adjacency DB entry: decode and diff against Decision's copy.
        kvstore_adj_db = deserialize_thrift_object(
            value.value, lsdb_types.AdjacencyDatabase)
        node_name = kvstore_adj_db.thisNodeName
        kvstore_adj_node_names.add(node_name)
        if node_name not in decision_adj_dbs:
            print(
                printing.render_vertical_table([[
                    "node {}'s adj db is missing in Decision".format(
                        node_name)
                ]]))
            return
        decision_adj_db = decision_adj_dbs[node_name]
        lines = utils.sprint_adj_db_delta(kvstore_adj_db, decision_adj_db)
        if lines:
            print(
                printing.render_vertical_table([[
                    "node {}'s adj db in Decision out of sync with KvStore's"
                    .format(node_name)
                ]]))
            print("\n".join(lines))
        return
    if key.startswith(Consts.PREFIX_DB_MARKER):
        # Prefix DB entry: decode, rebuild Decision's global prefix view for
        # this node, then diff it against the KvStore copy.
        kvstore_prefix_db = deserialize_thrift_object(
            value.value, lsdb_types.PrefixDatabase)
        node_name = kvstore_prefix_db.thisNodeName
        kvstore_prefix_node_names.add(node_name)
        if node_name not in decision_prefix_dbs:
            print(
                printing.render_vertical_table([[
                    "node {}'s prefix db is missing in Decision".format(
                        node_name)
                ]]))
            return
        decision_prefix_db = decision_prefix_dbs[node_name]
        decision_prefix_set = {}
        utils.update_global_prefix_db(decision_prefix_set, decision_prefix_db)
        lines = utils.sprint_prefixes_db_delta(decision_prefix_set,
                                               kvstore_prefix_db)
        if lines:
            print(
                printing.render_vertical_table([[
                    "node {}'s prefix db in Decision out of sync with KvStore's"
                    .format(node_name)
                ]]))
            print("\n".join(lines))
        return
def print_expired_keys(self, msg: openr_types.Publication, global_dbs: Dict):
    """Report expired keys from a publication and purge them from global DBs.

    :param msg: publication carrying ``expiredKeys`` (and traversal nodeIds)
    :param global_dbs: aggregated view with ``publications``, ``adjs`` and
        ``prefixes`` members, mutated in place as keys expire
    """
    rows = []
    if len(msg.expiredKeys):
        print("Traversal List: {}".format(msg.nodeIds))

    for key in msg.expiredKeys:
        rows.append(["Key: {} got expired".format(key)])

        # Delete key from global DBs
        global_dbs.publications.pop(key, None)
        if key.startswith(Consts.ADJ_DB_MARKER):
            global_dbs.adjs.pop(key.split(":")[1], None)
        if key.startswith(Consts.PREFIX_DB_MARKER):
            prefix_match = re.match(Consts.PER_PREFIX_KEY_REGEX, key)
            # in case of per prefix key expire, the prefix DB entry does not
            # contain any prefixes. The prefix must be constructed from the
            # key. Update the prefix set of the corresponding node.
            if prefix_match:
                addr_str = prefix_match.group("ipaddr")
                prefix_len = prefix_match.group("plen")
                # BUGFIX: the old code rebound a local
                # (node_prefix_set = node_prefix_set - prefix_set) which
                # discarded the difference; remove the prefix in place so
                # the node's stored prefix set is actually updated.
                global_dbs.prefixes[prefix_match.group("node")].discard(
                    "{}/{}".format(addr_str, prefix_len))
            else:
                global_dbs.prefixes.pop(key.split(":")[1], None)

    if rows:
        print(printing.render_vertical_table(rows, timestamp=True))
def print_kvstore_values(self, resp):
    """Print values from a raw KvStore publication.

    :param resp kv_store_types.Publication: pub from kv store
    """
    rows = []
    for key, value in sorted(resp.keyVals.items(), key=lambda kv: kv[0]):
        val = self.deserialize_kvstore_publication(key, value)
        if not val:
            # Unknown key type: show raw payload, hex-dumped if not printable
            printable = all(
                isinstance(c, str) and c in string.printable
                for c in value.value)
            if printable:
                val = value.value
            else:
                val = hexdump.hexdump(value.value, 'return')
        rows.append([
            "key: {}\n version: {}\n originatorId: {}\n "
            "ttl: {}\n ttlVersion: {}\n value:\n {}".format(
                key, value.version, value.originatorId, value.ttl,
                value.ttlVersion, val)
        ])
    print(printing.render_vertical_table(
        rows, caption="Dump key-value pairs in KvStore"))
def print_unicast_routes(
    caption: str,
    unicast_routes: List[network_types.UnicastRoute],
    prefixes: List[str] = None,
):
    """
    Print unicast routes. Subset specified by prefixes will be printed if specified
    """
    networks = [ipaddress.ip_network(p) for p in prefixes] if prefixes else None

    rows = []
    for route in unicast_routes:
        dest = ipnetwork.sprint_prefix(route.dest)
        if not ipnetwork.contain_any_prefix(dest, networks):
            continue
        nexthops = "\n".join(
            "via {}".format(ip_nexthop_to_str(nh))
            for nh in get_route_nexthops(route))
        rows.append([dest, nexthops])

    print(printing.render_vertical_table(rows, caption=caption))
def print_kvstore_values(self, resp: kv_store_types.Publication) -> None:
    """ print values from raw publication from KvStore"""
    rows = []
    for key, value in sorted(resp.keyVals.items(), key=lambda kv: kv[0]):
        val = self.deserialize_kvstore_publication(key, value)
        if not val:
            # Unknown key type: show the raw payload, hex-dumped when it
            # contains non-printable bytes.
            is_printable = all(
                isinstance(c, str) and c in string.printable
                for c in value.value)
            if is_printable:
                val = value.value
            else:
                val = hexdump.hexdump(value.value, "return")
        # Infinite TTL is rendered symbolically
        ttl = "INF" if value.ttl == Consts.CONST_TTL_INF else value.ttl
        rows.append([
            "key: {}\n version: {}\n originatorId: {}\n "
            "ttl: {}\n ttlVersion: {}\n value:\n {}".format(
                key,
                value.version,
                value.originatorId,
                ttl,
                value.ttlVersion,
                val,
            )
        ])
    print(printing.render_vertical_table(
        rows, caption="Dump key-value pairs in KvStore"))
def print_kvstore_values(self, resp, json_fmt):
    """Print values from a raw KvStore publication.

    :param resp kv_store_types.Publication: pub from kv store
    :param json_fmt: when truthy, dump the publication as JSON instead of a table
    """
    # Export in json format if enabled; this short-circuits table rendering
    if json_fmt:
        data = {k: utils.thrift_to_dict(v) for k, v in resp.keyVals.items()}
        print(utils.json_dumps(data))
        return

    rows = []
    for key, value in sorted(resp.keyVals.items(), key=lambda kv: kv[0]):
        val = self.deserialize_kvstore_publication(key, value)
        if not val:
            # Unknown key type: show raw payload, hex-dumped if not printable
            printable = all(isinstance(c, str) and c in string.printable
                            for c in value.value)
            if printable:
                val = value.value
            else:
                val = hexdump.hexdump(value.value, 'return')
        ttl = 'INF' if value.ttl == Consts.CONST_TTL_INF else value.ttl
        rows.append([
            "key: {}\n version: {}\n originatorId: {}\n "
            "ttl: {}\n ttlVersion: {}\n value:\n {}".format(
                key, value.version, value.originatorId, ttl,
                value.ttlVersion, val)
        ])
    print(printing.render_vertical_table(
        rows, caption="Dump key-value pairs in KvStore"))
def print_routes_table(route_db, prefixes=None):
    """Print the routes from the Decision/Fib module, optionally filtered
    to the given prefixes."""
    networks = [ipaddress.ip_network(p) for p in prefixes] if prefixes else None

    route_strs = []
    ordered = sorted(route_db.routes,
                     key=lambda r: r.prefix.prefixAddress.addr)
    for route in ordered:
        prefix_str = ipnetwork.sprint_prefix(route.prefix)
        if not ipnetwork.contain_any_prefix(prefix_str, networks):
            continue
        paths_str = "\n".join(
            "via {}%{} metric {}".format(
                ipnetwork.sprint_addr(path.nextHop.addr),
                path.ifName, path.metric)
            for path in route.paths)
        route_strs.append((prefix_str, paths_str))

    if not route_strs:
        route_strs.append(["No routes found."])
    print(printing.render_vertical_table(
        route_strs, caption="Routes for {}".format(route_db.thisNodeName)))
def print_db_delta_adj(
    self, key, value, kvstore_adj_node_names, decision_adj_dbs, json
):
    """Diff one adjacency DB from KvStore against Decision's copy.

    Returns status code. 0 = success, 1 = failure
    """
    kvstore_adj_db = deserialize_thrift_object(
        value.value, lsdb_types.AdjacencyDatabase
    )
    node_name = kvstore_adj_db.thisNodeName
    kvstore_adj_node_names.add(node_name)

    if node_name not in decision_adj_dbs:
        print(
            printing.render_vertical_table(
                [["node {}'s adj db is missing in Decision".format(node_name)]]
            )
        )
        return 1
    decision_adj_db = decision_adj_dbs[node_name]

    if json:
        # JSON mode: emit tagged deltas, but only when a mismatch exists
        tags = ("in_decision", "in_kvstore", "changed_in_decision_and_kvstore")
        deltas = utils.find_adj_list_deltas(
            decision_adj_db.adjacencies, kvstore_adj_db.adjacencies, tags=tags
        )
        deltas_json, return_code = utils.adj_list_deltas_json(deltas, tags=tags)
        if return_code:
            utils.print_json(deltas_json)
        return return_code

    delta_lines = utils.sprint_adj_db_delta(kvstore_adj_db, decision_adj_db)
    if not delta_lines:
        return 0
    print(
        printing.render_vertical_table(
            [
                [
                    "node {}'s adj db in Decision out of sync with "
                    "KvStore's".format(node_name)
                ]
            ]
        )
    )
    print("\n".join(delta_lines))
    return 1
def print_key_delta(self, key, node):
    """ print key delta """
    message = "key: {} only in {} kv store".format(key, node)
    print(printing.render_vertical_table([[message]]))
def print_publication_delta(title, pub_update, sprint_db=""):
    """Render a publication delta: title, update body and an optional DB dump."""
    tail = "\n\n{}".format(sprint_db) if sprint_db else ""
    body = "{}\n{}{}".format(title, pub_update, tail)
    print(printing.render_vertical_table([[body]]))
def print_adjs_table(adjs_map, enable_color, neigh=None, interface=None):
    ''' print adjacencies

        :param adjs_map: adjacencies keyed by node name, each a dict with
            'overloaded', 'node_label', 'adjacencies' (list of dict) and
            optionally 'version'
        :param enable_color: when true, overload status is click-styled
            (red/green); otherwise plain TRUE/FALSE/OVERLOADED text
        :param neigh: optional neighbor-name filter (used with interface)
        :param interface: optional interface-name filter (used with neigh)
    '''
    column_labels = [
        'Neighbor', 'Local Interface', 'Remote Interface', 'Metric',
        'Weight', 'Adj Label', 'NextHop-v4', 'NextHop-v6', 'Uptime'
    ]
    output = []
    adj_found = False
    for node, val in sorted(adjs_map.items()):
        # report overloaded status in color
        is_overloaded = val['overloaded']
        overload_color = 'red' if is_overloaded else 'green'
        overload_status = click.style('{}'.format(is_overloaded),
                                      fg=overload_color)
        cap = "{}'s adjacencies, version: {}, Node Label: {}, " \
            "Overloaded?: {}".format(
                node,
                val['version'] if 'version' in val else 'N/A',
                val['node_label'],
                overload_status if enable_color else (
                    'TRUE' if is_overloaded else 'FALSE'))
        # horizontal adj table for a node
        rows = []
        seg = ''
        for adj in sorted(val['adjacencies'],
                          key=lambda adj: adj['otherNodeName']):
            # filter if set: keep only the row matching both neighbor name
            # and local interface
            if neigh is not None and interface is not None:
                if neigh == adj['otherNodeName'] and interface == adj['ifName']:
                    adj_found = True
                else:
                    continue
            # per-adjacency overload marker (distinct from node-level one)
            overload_status = click.style('Overloaded', fg='red')
            metric = (overload_status if enable_color else
                      'OVERLOADED') if adj['isOverloaded'] else adj['metric']
            uptime = time_since(adj['timestamp']) if adj['timestamp'] else ''
            rows.append([
                adj['otherNodeName'], adj['ifName'], adj['otherIfName'],
                metric, adj['weight'], adj['adjLabel'], adj['nextHopV4'],
                adj['nextHopV6'], uptime
            ])
        # render the accumulated rows for this node (empty string if none)
        seg = printing.render_horizontal_table(rows, column_labels,
                                               tablefmt='plain')
        output.append([cap, seg])
    if neigh is not None and interface is not None and not adj_found:
        print('Adjacency with {} {} is not formed.'.format(neigh, interface))
        return
    print(printing.render_vertical_table(output))
def print_routes(caption, routes):
    """Print routes as a vertical table keyed by destination prefix."""
    rows = []
    for route in routes:
        dest = utils.sprint_prefix(route.dest)
        nexthop_lines = ["via {}".format(ip_nexthop_to_str(nh))
                         for nh in route.nexthops]
        rows.append((dest, '\n'.join(nexthop_lines)))
    print(printing.render_vertical_table(rows, caption=caption))
def print_db_diff(self, nodes_set_a, nodes_set_b, db_sources, db_type):
    """Report nodes whose DB appears in one source but not the other."""
    template = "node {}'s {} db in {} but not in {}"
    rows = []
    for node in sorted(nodes_set_a - nodes_set_b):
        rows.append([template.format(node, db_type, *db_sources)])
    for node in sorted(nodes_set_b - nodes_set_a):
        rows.append([template.format(node, db_type, *reversed(db_sources))])
    if rows:
        print(printing.render_vertical_table(rows))
def print_publication_delta(self, title: str, pub_update: List[str],
                            sprint_db: str = "") -> None:
    """Render a publication delta: title, update body and an optional DB dump."""
    tail = "\n\n{}".format(sprint_db) if sprint_db else ""
    body = "{}\n{}{}".format(title, pub_update, tail)
    print(printing.render_vertical_table([[body]]))
def print_peers(self, peers: kv_store_types.PeersMap) -> None:
    """ print the Kv Store peers """
    host_id = utils.get_connected_node_name(self.cli_opts)
    rows = [
        [name,
         "cmd via {}".format(peer.cmdUrl),
         "pub via {}".format(peer.pubUrl)]
        for name, peer in sorted(peers.items(), key=lambda kv: kv[0])
    ]
    print(printing.render_vertical_table(
        rows, caption="{}'s peers".format(host_id)))
def print_config(self, prefix_alloc):
    """Display the stored Prefix Allocator parameters."""
    seed_prefix = prefix_alloc.seedPrefix
    seed_addr = ipnetwork.sprint_addr(seed_prefix.prefixAddress.addr)
    rows = [
        ['Seed prefix: {}/{}'.format(seed_addr, seed_prefix.prefixLength)],
        ['Allocated prefix length: {}'.format(prefix_alloc.allocPrefixLen)],
        ['Allocated prefix index: {}'.format(prefix_alloc.allocPrefixIndex)],
    ]
    print(printing.render_vertical_table(
        rows, caption='Prefix Allocator parameters stored'))
def print_prefixes_table(resp, nodes, iter_func):
    ''' print prefixes '''

    def _parse_prefixes(rows, prefix_db):
        # Raw KvStore values arrive thrift-serialized; decode before rendering
        if isinstance(prefix_db, kv_store_types.Value):
            prefix_db = deserialize_thrift_object(
                prefix_db.value, lsdb_types.PrefixDatabase)
        rows.append(["{}".format(prefix_db.thisNodeName),
                     sprint_prefixes_db_full(prefix_db)])

    rows = []
    iter_func(rows, resp, nodes, _parse_prefixes)
    print(printing.render_vertical_table(rows))
def print_peers(self, client: OpenrCtrl.Client,
                peers_list: Dict[str, Any]) -> None:
    """ print the Kv Store peers """
    host_id = client.getMyNodeName()
    rows = []
    for area, peers in peers_list.items():
        # A missing area is rendered as "N/A"
        if area is None:
            area = "N/A"
        for name, peer in sorted(peers.items(), key=lambda kv: kv[0]):
            rows.append([f"{name}, area:{area}",
                         "cmd via {}".format(peer.cmdUrl)])
    print(printing.render_vertical_table(
        rows, caption="{}'s peers".format(host_id)))
def print_peers(self, peers_reply):
    """ print the Kv Store peers """
    host_id = utils.get_connected_node_name(self.host, self.lm_cmd_port)
    rows = [
        [name,
         "cmd via {}".format(peer.cmdUrl),
         "pub via {}".format(peer.pubUrl)]
        for name, peer in sorted(peers_reply.peers.items(),
                                 key=lambda kv: kv[0])
    ]
    print(printing.render_vertical_table(
        rows, caption="{}'s peers".format(host_id)))
def print_peers(self, peers_reply):
    ''' print the Kv Store peers '''
    host_id = utils.get_connected_node_name(self.host, self.lm_cmd_port)
    caption = '{}\'s peers'.format(host_id)
    rows = []
    for name, peer in sorted(peers_reply.peers.items(), key=lambda kv: kv[0]):
        # NOTE(review): str.encode("hex") only works on Python 2 — confirm
        # this module still targets py2 (binascii.hexlify is the py3 way).
        rows.append([
            name,
            'cmd via {}'.format(peer.cmdUrl),
            'pub via {}'.format(peer.pubUrl),
            'Public Key: {}'.format(peer.publicKey.encode("hex")),
        ])
    print(printing.render_vertical_table(rows, caption=caption))
def print_db_diff(self, nodes_set_a, nodes_set_b, db_sources, db_type, json):
    """ Returns a status code, 0 = success, 1 = failure"""
    a_minus_b = sorted(nodes_set_a - nodes_set_b)
    b_minus_a = sorted(nodes_set_b - nodes_set_a)

    if json:
        # JSON mode: emit a structured diff only when something differs
        if not (a_minus_b or b_minus_a):
            return 0
        utils.print_json({
            "db_type": db_type,
            "db_up": db_sources[0],
            "db_down": db_sources[1],
            "nodes_up": list(a_minus_b),
            "nodes_down": list(b_minus_a),
        })
        return 1

    template = "node {}'s {} db in {} but not in {}"
    rows = []
    for node in a_minus_b:
        rows.append([template.format(node, db_type, *db_sources)])
    for node in b_minus_a:
        rows.append([template.format(node, db_type, *reversed(db_sources))])
    if not rows:
        return 0
    print(printing.render_vertical_table(rows))
    return 1
def print_peers(
    self, client: OpenrCtrl.Client, peers: kv_store_types.PeersMap
) -> None:
    """ print the Kv Store peers """
    host_id = client.getMyNodeName()
    rows = [
        [name,
         "cmd via {}".format(peer.cmdUrl),
         "pub via {}".format(peer.pubUrl)]
        for name, peer in sorted(peers.items(), key=lambda kv: kv[0])
    ]
    print(printing.render_vertical_table(
        rows, caption="{}'s peers".format(host_id)))
def print_mpls_routes(caption: str,
                      mpls_routes: List[network_types.MplsRoute],
                      labels: List[int] = None):
    """
    List mpls routes. Subset specified by labels will be printed if specified
    """
    rows = []
    for route in mpls_routes:
        # Honor the optional label filter
        if labels and route.topLabel not in labels:
            continue
        nexthops = "\n".join(
            "via {}".format(ip_nexthop_to_str(nh)) for nh in route.nextHops)
        rows.append([str(route.topLabel), nexthops])
    print(printing.render_vertical_table(rows, caption=caption))
def print_routes(caption, routes, prefixes=None):
    """Print routes, optionally restricted to the given prefixes."""
    networks = [ipaddress.ip_network(p) for p in prefixes] if prefixes else None

    rows = []
    for route in routes:
        dest = ipnetwork.sprint_prefix(route.dest)
        if not ipnetwork.contain_any_prefix(dest, networks):
            continue
        paths = '\n'.join("via {}".format(ip_nexthop_to_str(nh))
                          for nh in route.nexthops)
        rows.append((dest, paths))

    print(printing.render_vertical_table(rows, caption=caption))
def print_mpls_labels(self, labels: List[int], element_prefix: str = ">",
                      element_suffix: str = "") -> None:
    """ Print mpls labels. Subset specified by labels_filter only will be printed if specified

    :param labels: mpls labels
    :param element_prefix: Starting prefix for each item. (string)
    :param element_suffix: Ending/terminator for each item. (string)
    """
    label_rows = [[str(lbl)] for lbl in labels]
    print(printing.render_vertical_table(
        label_rows,
        element_prefix=element_prefix,
        element_suffix=element_suffix))
def print_routes_table(route_db):
    ''' print the routes from Decision/Fib module '''
    route_strs = []
    for route in sorted(route_db.routes,
                        key=lambda r: r.prefix.prefixAddress.addr):
        prefix_str = sprint_prefix(route.prefix)
        paths_str = '\n'.join(
            "via {}@{} metric {}".format(
                sprint_addr(path.nextHop.addr), path.ifName, path.metric)
            for path in route.paths)
        route_strs.append((prefix_str, paths_str))

    if not route_strs:
        route_strs.append(['No routes found.'])
    print(printing.render_vertical_table(
        route_strs, caption="Routes for {}".format(route_db.thisNodeName)))
def print_expired_keys(self, msg, regex, pattern, global_dbs):
    """Report expired keys matching the filters and purge them from the
    global DBs (publications/adjs/prefixes/interfaces)."""
    rows = []
    for key in msg.expiredKeys:
        # Skip keys matching neither the literal prefix nor the pattern
        if not key.startswith(regex) and not pattern.match(key):
            continue
        rows.append(["Key: {} got expired".format(key)])

        # Delete key from global DBs
        global_dbs.publications.pop(key, None)
        if key.startswith(Consts.ADJ_DB_MARKER):
            global_dbs.adjs.pop(key.split(':')[1], None)
        if key.startswith(Consts.PREFIX_DB_MARKER):
            global_dbs.prefixes.pop(key.split(':')[1], None)
        if key.startswith(Consts.INTERFACE_DB_MARKER):
            global_dbs.interfaces.pop(key.split(':')[1], None)

    if rows:
        print_timestamp()
        print(printing.render_vertical_table(rows))