def get_devices(data: Union[List[dict], dict], sort: str = None) -> Union[List[dict], dict]:
    """Clean / normalize device API output.

    Orders keys (concatenating ip/mask into a single field when both are
    present), strips columns that are empty across all rows, runs every
    key/value pair through the short-form formatters, and returns the rows
    sorted by site, type, name.

    NOTE: the ``sort`` parameter is currently unused; sort order is fixed.
    """
    rows = utils.listify(data)
    rows = sort_result_keys(rows)   # key ordering + ip/mask concat
    rows = strip_no_value(rows)     # drop columns with no value in any row

    # Run every key/value pair through the formatters, dropping id-ish
    # columns and mac_range.
    formatted = []
    for row in rows:
        pairs = (
            short_value(key, _check_inner_dict(val))
            for key, val in pre_clean(row).items()
            if "id" not in key[-3:] and key != "mac_range"
        )
        formatted.append(dict(pairs))

    rows = utils.listify(_unlist(formatted))
    return sorted(
        rows,
        key=lambda r: (r.get("site") or "", r.get("type") or "", r.get("name") or ""),
    )
async def update_group_db(self):
    """Rebuild the local group cache from Central.

    On a successful API response the GroupDB table is truncated and
    repopulated; returns the inserted doc ids, else None.
    """
    resp = await self.central.get_all_groups()
    if not resp.ok:
        return

    resp.output = utils.listify(resp.output)
    self.updated.append(self.central.get_all_groups)
    self.GroupDB.truncate()
    return self.GroupDB.insert_multiple(resp.output)
def get_certificates(data: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Clean certificate API output: shorten/rename keys and format dates.

    Args:
        data: Certificate record(s) as returned by the API (dict or list of dicts).

    Returns:
        List of cleaned certificate dicts.  If the response schema no longer
        matches the expected key set, logs an error and returns the data
        unmodified.
    """
    data = utils.listify(data)
    short_keys = {
        "cert_name": "name",
        "cert_type": "type",
        "expire_date": "expiration",
        "expire": "expired",
        "cert_md5_checksum": "md5 checksum",
        "cert_sha1_checksum": "sha1 checksum",
    }

    if data and len(data[0]) != len(short_keys):
        log = logging.getLogger()
        # FIX: was `short_key.keys()` (undefined name) -> NameError whenever
        # the schema-mismatch branch was hit.
        log.error(
            f"get_certificates has returned more keys than expected, check for changes in response schema\n"
            f"    expected keys: {short_keys.keys()}\n"
            f"    got keys: {data[0].keys()}"
        )
        return data
    else:
        # Rename keys per short_keys; expire_date is additionally converted
        # to a readable date string.
        data = [
            {
                short_keys[k]: d[k] if k != "expire_date" else _convert_datestring(d[k])
                for k in short_keys
            }
            for d in data
        ]
        return data
def _refresh_token(self, token_data: Union[dict, List[dict]] = []) -> bool:
    """Attempt to refresh the Central OAuth token using cached token data.

    Tries each provided token dict in order until one refresh succeeds,
    storing the new token and updating the authorization header.

    Args:
        token_data (Union[dict, List[dict]], optional): One or more cached
            token dicts to try in order.
            NOTE(review): mutable default argument — harmless here only
            because the list is never mutated; consider ``None`` default.

    Returns:
        bool: True when a token was refreshed and stored, else False.
    """
    auth = self.auth
    token_data = utils.listify(token_data)
    token = None
    spin = Halo("Attempting to Refresh Token")
    spin.start()
    for idx, t in enumerate(token_data):
        try:
            if idx == 1:
                # second attempt: mark the first as failed and restart the
                # spinner with a " retry" suffix
                spin.fail()
                spin.text = spin.text + " retry"
                spin.start()
            token = auth.refreshToken(t)
            if token:
                token = auth.storeToken(token) or token  # NOTE(review): original stores then caches below
                auth.central_info["token"] = token
                break
        except Exception as e:
            # best effort: log the failure and fall through to the next
            # cached token (if any)
            log.exception(
                f"Attempt to refresh token returned {e.__class__.__name__} {e}"
            )
    if token:
        self.headers["authorization"] = f"Bearer {self.auth.central_info['token']['access_token']}"
        # spin.succeed()
        spin.stop()
    else:
        spin.fail()
    return token is not None
async def update_dev_db(self):
    """Rebuild the local device cache from Central.

    Truncates DevDB and repopulates it when the API call succeeds;
    returns the inserted doc ids, else None.
    """
    resp = await self.central.get_all_devicesv2()
    if not resp.ok:
        return

    resp.output = utils.listify(resp.output)
    self.updated.append(self.central.get_all_devicesv2)
    self.DevDB.truncate()
    return self.DevDB.insert_multiple(resp.output)
async def update_template_db(self):
    """Rebuild the local template cache from Central.

    Scopes the template pull to cached groups only when the group cache
    was refreshed this run; otherwise pulls for all groups.
    """
    groups = self.groups if self.central.get_all_groups in self.updated else None
    resp = await self.central.get_all_templates(groups=groups)
    if not resp.ok:
        return

    resp.output = utils.listify(resp.output)
    self.updated.append(self.central.get_all_templates)
    self.TemplateDB.truncate()
    return self.TemplateDB.insert_multiple(resp.output)
def _display_results(
    data: Union[List[dict], List[str], None] = None,
    tablefmt: str = "rich",
    title: str = None,
    caption: str = None,
    pager: bool = True,
    outfile: Path = None,
    sort_by: str = None,
    reverse: bool = False,
    pad: int = None,
    cleaner: callable = None,
    **cleaner_kwargs,
):
    """Format API output data and send it to the terminal (and optionally a file).

    Args:
        data: Rows to display (list of dicts/strs); no-op when falsy.
        tablefmt: Output format passed to utils.output. Defaults to "rich".
        title: Table title (rich format only).
        caption: Table caption (rich format only).
        pager: Page output when it exceeds the terminal height.
        outfile: If provided, also write the (unstyled) output to this file.
        sort_by: Column to sort on; skipped with a warning if any row lacks it.
        reverse: Reverse row order after sorting.
        pad: Unused here; accepted for signature compatibility.
        cleaner: Cleaner function applied to data before formatting.
        **cleaner_kwargs: Passed through to cleaner.
    """
    if data:
        data = utils.listify(data)

        if cleaner:
            data = cleaner(data, **cleaner_kwargs)

        if sort_by and all(isinstance(d, dict) for d in data):
            # only sort when every row has the sort key, else warn and skip
            if not all([True if sort_by in d else False for d in data]):
                typer.echo(f"Invalid dataset for {sort_by} not all entries contain a {sort_by} key")
                typer.secho("sort by is not implemented for all commands yet", fg="red")
            else:
                data = sorted(data, key=lambda d: d[sort_by])

        if reverse:
            data = data[::-1]

        outdata = utils.output(
            data,
            tablefmt,
            title=title,
            caption=caption,
            account=None if config.account in ["central_info", "account"] else config.account,
            config=config,
        )
        # page only when paging is enabled and output exceeds terminal height
        typer.echo_via_pager(outdata) if pager and tty and len(outdata) > tty.rows else typer.echo(outdata)

        # -- // Output to file \\ --
        if outfile and outdata:
            if Path().cwd() != Path.joinpath(config.outdir / outfile):
                # redirect bare relative paths into the configured out dir,
                # warning when the cwd looks like a git repo root
                if Path.joinpath(outfile.parent.resolve() / ".git").is_dir():
                    typer.secho(
                        "It looks like you are in the root of a git repo dir.\n"
                        "Exporting to out subdir."
                    )
                config.outdir.mkdir(exist_ok=True)
                outfile = config.outdir / outfile

            print(typer.style(f"\nWriting output to {outfile}... ", fg="cyan"), end="")
            outfile.write_text(outdata.file)  # typer.unstyle(outdata) also works
            typer.secho("Done", fg="green")
async def update_site_db(self):
    """Rebuild the local site cache from Central.

    Truncates SiteDB and repopulates it when the API call succeeds;
    returns the inserted doc ids, else None.
    """
    resp = await self.central.get_all_sites()
    if not resp.ok:
        return

    resp.output = utils.listify(resp.output)
    # TODO time truncate+insert_multiple vs per-site upsert
    # (self.SiteDB.upsert(site, cond=self.Q.id == site.get("id")))
    # to see which is more efficient
    self.updated.append(self.central.get_all_sites)
    self.SiteDB.truncate()
    return self.SiteDB.insert_multiple(resp.output)
def get_clients(data: List[dict], verbose: bool = False, cache: callable = None, **kwargs) -> list:
    """Remove all columns that are NA for all clients in the list"""
    clients = utils.listify(data)
    clients = [
        _client_concat_associated_dev(c, verbose=verbose, cache=cache, **kwargs)
        for c in clients
    ]

    if verbose:
        # verbose: keep every key seen on any client, minus the strip list
        drop_keys = constants.CLIENT_STRIP_KEYS_VERBOSE
        if clients and all(isinstance(c, dict) for c in clients):
            every_key = set(k for c in clients for k in c)
            clients = [
                dict(short_value(k, c.get(k)) for k in every_key if k not in drop_keys)
                for c in clients
            ]
    else:
        # terse: fixed, ordered subset of columns
        ordered_keys = [
            "name",
            "macaddr",
            "vlan",
            "ip_address",
            "user_role",
            "network",
            "connection",
            "connected device",
            "gateway",
            "site",
            "group_name",
            "last_connection_time",
        ]
        if clients and all(isinstance(c, dict) for c in clients):
            clients = [
                dict(short_value(k, c.get(k)) for k in ordered_keys)
                for c in clients
            ]

    return strip_no_value(clients)
async def update_site_db(self, data: Union[list, dict] = None, remove: bool = False) -> List[int]:
    """Update the local site cache (TinyDB).

    Args:
        data (Union[list, dict], optional): Site records to insert, or — when
            ``remove`` is True — site ids (int / digit str) or single-key query
            dicts ({field: value}) identifying docs to delete.  When None, the
            full site list is re-pulled from Central and the cache rebuilt.
        remove (bool, optional): Treat ``data`` as removal queries.  Defaults to False.

    Returns:
        List[int]: doc ids inserted or removed.

    Raises:
        ValueError: when a removal query dict contains more than one key.
    """
    # cli.cache.SiteDB.search(cli.cache.Q.id == del_list[0])[0].doc_id
    if data:
        data = utils.listify(data)
        if not remove:
            return self.SiteDB.insert_multiple(data)
        else:
            doc_ids = []
            for qry in data:
                # provided list of site_ids to remove
                if isinstance(qry, (int, str)) and str(qry).isdigit():
                    doc_ids += [
                        self.SiteDB.get((self.Q.id == int(qry))).doc_id
                    ]
                else:
                    # list of dicts with {search_key: value_to_search_for}
                    if len(qry.keys()) > 1:
                        raise ValueError(
                            f"cache.update_site_db remove Should only have 1 query not {len(qry.keys())}"
                        )
                    q = list(qry.keys())[0]
                    doc_ids += [
                        self.SiteDB.get((self.Q[q] == qry[q])).doc_id
                    ]
            return self.SiteDB.remove(doc_ids=doc_ids)
    else:
        # no data provided: full refresh from Central
        resp = await self.central.get_all_sites()
        if resp.ok:
            resp.output = utils.listify(resp.output)
            # TODO time this to see which is more efficient
            # start = time.time()
            # upd = [self.SiteDB.upsert(site, cond=self.Q.id == site.get("id")) for site in site_resp.output]
            # upd = [item for in_list in upd for item in in_list]
            self.updated.append(self.central.get_all_sites)
            self.SiteDB.truncate()
            # print(f" site db Done: {time.time() - start}")
            return self.SiteDB.insert_multiple(resp.output)
def get_lldp_neighbor(data: List[Dict[str, str]]) -> List[Dict[str, str]]:
    """Flatten lldp neighbor output into a single record.

    Skips bond0 entries and the keys in strip_keys, shortens known verbose
    media-type values, then drops keys with no value.
    """
    strip_keys = ["cid"]
    _short_val = {
        "1000BaseTFD - Four-pair Category 5 UTP, full duplex mode": "1000BaseT FD"
    }
    if len(data) > 1:
        merged = {}
        for entry in data:
            # bond0 duplicates the data reflected on the physical interface
            if entry["localPort"] == "bond0":
                continue
            for key, val in entry.items():
                if key not in strip_keys:
                    merged[key] = _short_val.get(val, val)
        data = merged
    return strip_no_value(utils.listify(data))
async def update_group_db(self, data: Union[list, dict] = None, remove: bool = False) -> List[int]:
    """Update the local group cache (TinyDB).

    Args:
        data (Union[list, dict], optional): Group records to insert, or — when
            ``remove`` is True — single-key query dicts ({field: value})
            identifying docs to delete.  When None, the full group list is
            re-pulled from Central and the cache rebuilt.
        remove (bool, optional): Treat ``data`` as removal queries.  Defaults to False.

    Returns:
        List[int]: doc ids inserted or removed.

    Raises:
        ValueError: when a removal query dict contains more than one key.
    """
    if data:
        data = utils.listify(data)
        if not remove:
            return self.GroupDB.insert_multiple(data)
        else:
            doc_ids = []
            for qry in data:
                # each removal query must be a single {field: value} pair
                if len(qry.keys()) > 1:
                    raise ValueError(
                        f"cache.update_group_db remove Should only have 1 query not {len(qry.keys())}"
                    )
                q = list(qry.keys())[0]
                doc_ids += [self.GroupDB.get((self.Q[q] == qry[q])).doc_id]
            return self.GroupDB.remove(doc_ids=doc_ids)
    else:
        # no data provided: full refresh from Central
        resp = await self.central.get_all_groups()
        if resp.ok:
            resp.output = utils.listify(resp.output)
            self.updated.append(self.central.get_all_groups)
            self.GroupDB.truncate()
            return self.GroupDB.insert_multiple(resp.output)
def devices(
    args: List[str] = typer.Argument(None, metavar=iden_meta.dev, hidden=False),
    group: str = typer.Option(None, metavar="<Device Group>", help="Filter by Group", ),  # TODO cli.cache group names
    label: str = typer.Option(None, metavar="<Device Label>", help="Filter by Label", ),
    status: StatusOptions = typer.Option(None, metavar="[up|down]", help="Filter by device status"),
    state: StatusOptions = typer.Option(None, hidden=True),  # alias for status
    pub_ip: str = typer.Option(None, metavar="<Public IP Address>", help="Filter by Public IP"),
    do_stats: bool = typer.Option(False, "--stats", is_flag=True, help="Show device statistics"),
    do_clients: bool = typer.Option(False, "--clients", is_flag=True, help="Calculate client count (per device)"),
    sort_by: SortOptions = typer.Option(None, "--sort"),
    do_json: bool = typer.Option(False, "--json", is_flag=True, help="Output in JSON"),
    do_yaml: bool = typer.Option(False, "--yaml", is_flag=True, help="Output in YAML"),
    do_csv: bool = typer.Option(False, "--csv", is_flag=True, help="Output in CSV"),
    do_table: bool = typer.Option(False, "--table", help="Output in table format",),
    outfile: Path = typer.Option(None, "--out", help="Output to file (and terminal)", writable=True),
    no_pager: bool = typer.Option(False, "--no-pager", help="Disable Paged Output"),
    update_cache: bool = typer.Option(False, "-U", hidden=True),  # Force Update of cli.cache for testing
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option("central_info", envvar="ARUBACLI_ACCOUNT", help="The Aruba Central Account to use (must be defined in the config)", callback=cli.account_name_callback),
):
    """show devices CLI command.

    Resolves the positional args to a device (or 'all') and dispatches to
    show_devices with all filter/output options passed through.
    """
    # map cached device types to the API path segment
    type_to_link = {
        'ap': 'aps',
        'SW': 'switches',
        'CX': 'switches',
        'gateway': 'gateways'
    }
    if args and args[0] == 'all':
        dev_type = 'all'
        # drop the leading 'all'; any remaining args are device identifiers
        args = () if len(args) == 1 else args[1:]

    if args:
        dev = cli.cache.get_dev_identifier(args)
        args = utils.listify(args)
        dev_type = type_to_link.get(dev.type, dev.type)
    else:
        # show devices ... equiv to show all
        dev_type = 'all'

    show_devices(
        dev_type, *args, outfile=outfile, update_cache=update_cache, group=group,
        status=status, state=state, label=label, pub_ip=pub_ip, do_clients=do_clients,
        do_stats=do_stats, sort_by=sort_by, no_pager=no_pager, do_json=do_json,
        do_csv=do_csv, do_yaml=do_yaml, do_table=do_table)
def get_lldp_neighbor(data: List[Dict[str, str]]) -> List[Dict[str, str]]:
    """Flatten lldp neighbor output into a single record.

    Drops bond0 entries that duplicate the data reflected on the physical
    (eth0) interface, strips noisy keys, merges the remaining entries, and
    removes keys with no value.
    """
    strip_keys = ["cid"]
    pop_list = []

    # strip bond0 interface same data++ reflected in eth0 interface
    for idx, neighbor in enumerate(data):
        if idx + 1 < len(data) and neighbor.get(
                "serial", "") == data[idx + 1].get("serial", "--"):
            if neighbor.get("localPort", "") == "bond0":
                pop_list += [idx]

    # FIX: pop in reverse order.  Popping ascending indices shifted every
    # subsequent index left by one, so with multiple bond0 entries the wrong
    # elements were removed (or IndexError raised).
    for i in reversed(pop_list):
        _ = data.pop(i)

    data = {k: v for d in data for k, v in d.items() if k not in strip_keys}
    return strip_no_value(utils.listify(data))
def get_clients(data: List[dict], **kwargs) -> list:
    """Remove all columns that are NA for all clients in the list"""
    clients = utils.listify(data)

    if clients and all(isinstance(c, dict) for c in clients):
        # union of keys across all clients, minus the configured strip list
        every_key = set(k for c in clients for k in c)
        clients = [
            dict(
                short_value(k, c.get(k))
                for k in every_key
                if k not in constants.CLIENT_STRIP_KEYS
            )
            for c in clients
        ]

    clients = [_client_concat_associated_dev(c, **kwargs) for c in clients]
    return strip_no_value(clients)
def sites(data: Union[List[dict], dict]) -> Union[List[dict], dict]:
    """Normalize site records.

    Applies a fixed key order, renames a few keys to friendlier names, and
    drops the visualrf_default pseudo-site.
    """
    data = utils.listify(data)
    ordered = [
        "site_name",
        "site_id",
        "address",
        "city",
        "state",
        "zipcode",
        "country",
        "longitude",
        "latitude",
        "associated_device_count",
    ]  # , "tags"]
    rename = {
        "associated_device_count": "associated devices",
        "site_id": "id",
        "site_name": "name"
    }
    out = []
    for site in data:
        if site.get("site_name", "") == "visualrf_default":
            continue
        out.append({rename.get(k, k): site[k] for k in ordered})
    return _unlist(out)
def display_results( self, resp: Union[Response, List[Response]] = None, data: Union[List[dict], List[str], None] = None, tablefmt: str = "rich", title: str = None, caption: str = None, pager: bool = True, outfile: Path = None, sort_by: str = None, reverse: bool = None, pad: int = None, exit_on_fail: bool = False, ok_status: Union[int, List[int], Dict[int, str]] = None, cleaner: callable = None, **cleaner_kwargs, ) -> None: """Output Formatted API Response to display and optionally to file one of resp or data attribute is required Args: resp (Union[Response, List[Response], None], optional): API Response objects. data (Union[List[dict], List[str], None], optional): API Response output data. tablefmt (str, optional): Format of output. Defaults to "rich" (tabular). title: (str, optional): Title of output table. Only applies to "rich" tablefmt. Defaults to None. caption: (str, optional): Caption displayed at bottome of table. Only applies to "rich" tablefmt. Defaults to None. pager (bool, optional): Page Output / or not. Defaults to True. outfile (Path, optional): path/file of output file. Defaults to None. sort_by (Union[str, List[str], None] optional): column or columns to sort output on. reverse (bool, optional): reverse the output. ok_status (Union[int, List[int], Tuple[int, str], List[Tuple[int, str]]], optional): By default responses with status_code 2xx are considered OK and are rendered as green by Output class. provide int or list of int to override additional status_codes that should also be rendered as success/green. provide a dict with {int: str, ...} where string can be any color supported by Output class or "neutral" "success" "fail" where neutral is no formatting, and success / fail will use the default green / red respectively. cleaner (callable, optional): The Cleaner function to use. 
""" # TODO remove ok_status, and handle in CentralAPI method (set resp.ok = True) if pad: log.warning("Depricated pad parameter referenced in display_results") pager = False if config.no_pager else pager if resp is not None: resp = utils.listify(resp) # data = [] for idx, r in enumerate(resp): if len(resp) > 1: _url = r.url if not hasattr(r.url, "path") else r.url.path typer.secho(f"Request {idx + 1} [{r.method}: {_url}] Response:", fg="cyan") if not r or tablefmt == "action": fg = "green" if r else "red" typer.secho(str(r), fg=fg) if not r and exit_on_fail: raise typer.Exit(1) else: if str(r.rl) != "None": caption = f"{caption} [italic dark_olive_green2]{r.rl}".lstrip() self._display_results( r.output, tablefmt=tablefmt, title=title, caption=caption, pager=pager, outfile=outfile, sort_by=sort_by, reverse=reverse, pad=pad, cleaner=cleaner, **cleaner_kwargs ) elif data: self._display_results( data, tablefmt=tablefmt, title=title, caption=caption, pager=pager, outfile=outfile, sort_by=sort_by, reverse=reverse, pad=pad, cleaner=cleaner, **cleaner_kwargs )
def display_results(
    self,
    resp: Union[Response, List[Response]] = None,
    data: Union[List[dict], List[str], dict, None] = None,
    tablefmt: TableFormat = "rich",
    title: str = None,
    caption: str = None,
    pager: bool = False,
    outfile: Path = None,
    sort_by: str = None,
    reverse: bool = False,
    stash: bool = True,
    pad: int = None,
    exit_on_fail: bool = False,
    ok_status: Union[int, List[int], Dict[int, str]] = None,
    set_width_cols: dict = None,
    full_cols: Union[List[str], str] = [],
    fold_cols: Union[List[str], str] = [],
    cleaner: callable = None,
    **cleaner_kwargs,
) -> None:
    """Output Formatted API Response to display and optionally to file

    one of resp or data attribute is required

    Args:
        resp (Union[Response, List[Response], None], optional): API Response objects.
        data (Union[List[dict], List[str], None], optional): API Response output data.
        tablefmt (str, optional): Format of output. Defaults to "rich" (tabular).
            Valid Values: "json", "yaml", "csv", "rich", "simple", "tabulate", "raw", "action"
            Where "raw" is unformatted raw response and "action" is formatted for POST|PATCH etc.
            where the result is a simple success/error.
        title: (str, optional): Title of output table.
            Only applies to "rich" tablefmt. Defaults to None.
        caption: (str, optional): Caption displayed at bottome of table.
            Only applies to "rich" tablefmt. Defaults to None.
        pager (bool, optional): Page Output / or not. Defaults to True.
        outfile (Path, optional): path/file of output file. Defaults to None.
        sort_by (Union[str, List[str], None] optional): column or columns to sort output on.
        reverse (bool, optional): reverse the output.
        stash (bool, optional): stash (cache) the output of the command.
            The CLI can re-display with show last.  Default: True
        ok_status (Union[int, List[int], Tuple[int, str], List[Tuple[int, str]]], optional): By default
            responses with status_code 2xx are considered OK and are rendered as green by
            Output class.  provide int or list of int to override additional status_codes that
            should also be rendered as success/green.  provide a dict with {int: str, ...}
            where string can be any color supported by Output class or "neutral" "success" "fail"
            where neutral is no formatting, and success / fail will use the default green / red respectively.
        set_width_cols (Dict[str: Dict[str, int]]): Passed to output function
            defines cols with min/max width
            example: {'details': {'min': 10, 'max': 30}, 'device': {'min': 5, 'max': 15}}
        full_cols (list): columns to ensure are displayed at full length (no wrap no truncate)
        cleaner (callable, optional): The Cleaner function to use.
    """
    # TODO remove ok_status, and handle in CentralAPI method (set resp.ok = True)
    if pad:
        log.error("Deprecated pad parameter referenced in display_results", show=True)

    if resp is not None:
        resp = utils.listify(resp)

        # update caption with rate limit
        # NOTE(review): rl_str is only bound when resp[-1].rl is truthy but is
        # referenced again below for "action"/"raw" output — confirm upstream.
        if resp[-1].rl:
            rl_str = f"[italic dark_olive_green2]{resp[-1].rl}[/]".lstrip()
            caption = f"{caption}\n {rl_str}" if caption else f" {rl_str}"

        for idx, r in enumerate(resp):
            # Multi request url line
            m_colors = {
                "GET": "bright_green",
                "DELETE": "red",
                "PATH": "dark_orange3",
                "PUT": "dark_orange3",
                "POST": "dark_orange3"
            }
            fg = "bright_green" if r else "red"
            conditions = [len(resp) > 1, tablefmt in ["action", "raw"], r.ok and not r.output]
            if any(conditions):
                # show the request method/url header line
                _url = r.url if not hasattr(r.url, "raw_path_qs") else r.url.path
                m_color = m_colors.get(r.method, "reset")
                print(
                    f"Request {idx + 1} [[{m_color}]{r.method}[reset]: "
                    f"[cyan]{_url}[/cyan]]\n [fg]Response[reset]:"
                )

            if self.raw_out:
                tablefmt = "raw"

            if not r.output:
                # empty response body: render a status/warning placeholder
                c = Console(record=True)
                c.begin_capture()
                c.print(f" Status Code: [{fg}]{r.status}[/]")
                c.print(f" :warning: Empty Response. This may be normal.")
                r.output = c.end_capture()

            if not r or tablefmt in ["action", "raw"]:
                if tablefmt == "raw":
                    # dots = f"[{fg}]{'.' * 16}[/{fg}]"
                    status_code = f"[{fg}]status code: {r.status}[/{fg}]"
                    print(r.url)
                    print(status_code)
                    if not r.ok:
                        print(r.error)
                    # print(f"{dots}\n{status_code}\n{dots}")
                    print(
                        "[bold cyan]Unformatted response from Aruba Central API GW[/bold cyan]"
                    )
                    print(r.raw)
                    if outfile:
                        self.write_file(outfile, r.raw)
                else:
                    print(f"[{fg}]{r}")

                # show rate-limit info after the last response
                if idx + 1 == len(resp):
                    console.print(f"\n{rl_str}")
            else:
                self._display_results(
                    r.output,
                    tablefmt=tablefmt,
                    title=title,
                    caption=caption,
                    pager=pager,
                    outfile=outfile,
                    sort_by=sort_by,
                    reverse=reverse,
                    stash=stash,
                    pad=pad,
                    set_width_cols=set_width_cols,
                    full_cols=full_cols,
                    fold_cols=fold_cols,
                    cleaner=cleaner,
                    **cleaner_kwargs
                )

        # TODO make elegant caas send-cmds uses this logic
        if cleaner and cleaner.__name__ == "parse_caas_response":
            print(caption)

        if exit_on_fail and not all([r.ok for r in resp]):
            raise typer.Exit(1)
    elif data:
        self._display_results(
            data,
            tablefmt=tablefmt,
            title=title,
            caption=caption,
            pager=pager,
            outfile=outfile,
            sort_by=sort_by,
            reverse=reverse,
            stash=stash,
            pad=pad,
            set_width_cols=set_width_cols,
            full_cols=full_cols,
            fold_cols=fold_cols,
            cleaner=cleaner,
            **cleaner_kwargs
        )
def do_lldp_rename(fstr: str, **kwargs) -> Response:
    """Rename Up APs based on a format string with LLDP/device substitutions.

    ``fstr`` supports %-specifiers (h=lldp neighbor hostname, m=mac,
    p=lldp remote port, M=model, S=site, s=serial) with optional
    sub-string/segment modifiers: ``%x[n]`` / ``%x[n:m]`` slicing,
    ``%x<sep><n>`` split-segment selection, and ``%x((a,b))`` char replace.

    Args:
        fstr: The name format string.
        **kwargs: Filters passed through to get_devices (group, label, ...).

    Returns:
        Response: the batched update_ap_settings response.
    """
    # lldp details are only needed when the format references them
    need_lldp = False if "%h" not in fstr and "%p" not in fstr else True
    # TODO get all APs then filter down after, stash down aps for easy subsequent call
    resp = cli.central.request(cli.central.get_devices, "aps", status="Up", **kwargs)

    if not resp:
        cli.display_results(resp, exit_on_fail=True)
    elif not resp.output:
        # successful call but nothing matched the filters: surface as failure
        filters = ", ".join([f"{k}: {v}" for k, v in kwargs.items()])
        resp.output = {
            "description": "API called was successful but returned no results.",
            "error": f"No Up APs found matching provided filters ({filters})."
        }
        resp.ok = False
        cli.display_results(resp, exit_on_fail=True)

    _all_aps = utils.listify(resp.output)
    _keys = ["name", "macaddr", "model", "site", "serial"]
    # index APs by serial, renaming macaddr -> mac
    ap_dict = {d["serial"]: {k if k != "macaddr" else "mac": d[k] for k in d if k in _keys} for d in _all_aps}
    fstr_to_key = {
        "h": "neighborHostName",
        "m": "mac",
        "p": "remotePort",
        "M": "model",
        "S": "site",
        "s": "serial"
    }
    req_list, name_list, shown_promt = [], [], False

    if not ap_dict:
        log.error("Something went wrong, no ap_dict provided or empty", show=True)
        raise typer.Exit(1)

    # 2 calls per AP (get settings + update), +1 each for lldp when needed
    num_calls = len(ap_dict) * 3 if need_lldp else len(ap_dict) * 2
    if len(ap_dict) > 5:
        # warn about API quota consumption before proceeding
        _warn = "\n\n[blink bright_red blink]WARNING[reset]"
        if need_lldp:
            _warn = f"{_warn} Format provided requires details about the upstream switch.\n"
            _warn = f"{_warn} This automation will result in [cyan]{num_calls}[/] API calls. 3 per AP.\n"
            _warn = f"{_warn} 1 to gather details about the upstream switch\n"
        else:
            _warn = f"{_warn} This automation will result in [cyan]{num_calls}[/] API calls, 1 for each AP.\n"
        _warn = f"{_warn} 1 to get the aps current settings (all settings need to be provided during the update, only the name changes).\n"
        _warn = f"{_warn} 1 to Update the settings / rename the AP.\n"
        _warn = f"{_warn}\n Current daily quota: [bright_green]{resp.rl.remain_day}[/] calls remaining\n"
        print(_warn)
        if resp.rl.remain_day < num_calls:
            print(f" {resp.rl}")
            print(f" More calls required {num_calls} than what's remaining in daily quota {resp.rl.remain_day}.")
        typer.confirm("Proceed:", abort=True)

    if need_lldp:
        ap_dict = _get_lldp_dict(ap_dict)

    # TODO refactor and use a template string or j2 something should already exist for this stuff.
    for ap in ap_dict:
        ap_dict[ap]["mac"] = utils.Mac(ap_dict[ap]["mac"]).clean
        # retry loop: re-prompt for a new format string on parse errors/abort
        while True:
            st = 0  # scan position; chars before st were consumed by a specifier
            x = ''  # the name being built for this AP
            try:
                for idx, c in enumerate(fstr):
                    if not idx >= st:
                        continue
                    if c == '%':
                        if fstr[idx + 1] not in fstr_to_key.keys():
                            _e1 = typer.style(
                                f"Invalid source specifier ({fstr[idx + 1]}) in format string {fstr}: ",
                                fg="red"
                            )
                            _e2 = "Valid values:\n{}".format(
                                ", ".join(fstr_to_key.keys())
                            )
                            typer.echo(f"{_e1}\n{_e2}")
                            raise KeyError(f"{fstr[idx + 1]} is not valid")

                        _src = ap_dict[ap][fstr_to_key[fstr[idx + 1]]]
                        if fstr[idx + 2] != "[":
                            if fstr[idx + 2] == "%" or fstr[idx + 3] == "%":
                                # bare specifier: substitute the whole value
                                x = f'{x}{_src}'
                                st = idx + 2
                            elif fstr[idx + 2:idx + 4] == "((":  # +3 should also be (
                                # %x((a,b)): replace char a with char b
                                _from = fstr[idx + 4]
                                _to = fstr[idx + 6]

                                if not fstr[idx + 5] == ",":
                                    typer.secho(
                                        f"expected a comma at character {idx + 1 + 5} but found {fstr[idx + 5]}\n"
                                        "will try to proceed.", fg="bright_red"
                                    )

                                if not fstr[idx + 7:idx + 9] == "))":
                                    typer.secho(
                                        f"expected a )) at characters {idx + 1 + 7}-{idx + 1 + 8} "
                                        f"but found {fstr[idx + 7]}{fstr[idx + 8]}\n"
                                        "will try to proceed.", fg="bright_red"
                                    )

                                x = f'{x}{_src.replace(_from, _to)}'
                                st = idx + 9
                            else:
                                # %x<sep><n>: split on <sep>, take segment n
                                try:
                                    fi = _get_full_int(fstr[idx + 3:])
                                    x = f'{x}{_src.split(fstr[idx + 2])[fi.i]}'
                                    st = idx + 3 + len(fi)
                                except IndexError:
                                    _e1 = ", ".join(_src.split(fstr[idx + 2]))
                                    _e2 = len(_src.split(fstr[idx + 2]))
                                    typer.secho(
                                        f"\nCan't use segment {fi.o} of '{_e1}'\n"
                                        f" It only has {_e2} segments.\n",
                                        fg="red"
                                    )
                                    raise
                        else:  # +2 is '['
                            if fstr[idx + 3] == "-":
                                # %x[-n]: take the final n characters
                                try:
                                    fi = _get_full_int(fstr[idx + 4:])
                                    x = f'{x}{"".join(_src[-fi.o:])}'
                                    st = idx + 4 + len(fi) + 1  # +1 for closing ']'
                                except IndexError:
                                    typer.secho(
                                        f"Can't extract the final {fi.o} characters from {_src}"
                                        f"It's only {len(_src)} characters."
                                    )
                                    raise
                            else:  # +2 is '[' +3: should be int [1:4]
                                # %x[n:m]: take characters n through m
                                fi = _get_full_int(fstr[idx + 3:])
                                fi2 = _get_full_int(fstr[idx + 3 + len(fi) + 1:])  # +1 for expected ':'
                                if len(_src[slice(fi.i, fi2.o)]) < fi2.o - fi.i:
                                    # requested slice exceeds the source value:
                                    # confirm (once) before using the shorter result
                                    _e1 = typer.style(
                                        f"\n{fstr} wants to take characters "
                                        f"\n{fi.o} through {fi2.o}"
                                        f"\n\"from {_src}\" (slice ends at character {len(_src[slice(fi.i, fi2.o)])}).",
                                        fg="red"
                                    )
                                    if not shown_promt and typer.confirm(
                                        f"{_e1}"
                                        f"\n\nResult will be \""
                                        f"{typer.style(''.join(_src[slice(fi.i, fi2.o)]), fg='bright_green')}\""
                                        " for this segment."
                                        "\nOK to continue?"
                                    ):
                                        shown_promt = True
                                        x = f'{x}{"".join(_src[slice(fi.i, fi2.o)])}'
                                        st = idx + 3 + len(fi) + len(fi2) + 2  # +2 for : and ]
                                        # NOTE(review): when the slice is in range no
                                        # append appears to occur here, and a 2nd short
                                        # slice aborts (shown_promt already True) —
                                        # confirm intended behavior upstream.
                                    else:
                                        raise typer.Abort()
                    else:
                        # literal character: copy through
                        x = f'{x}{c}'
                req_list += [cli.central.BatchRequest(cli.central.update_ap_settings, (ap, x))]
                name_list += [f" {x}"]
                break
            except typer.Abort:
                fstr = _lldp_rename_get_fstr()
            except Exception as e:
                log.exception(f"LLDP rename exception while parsing {fstr}\n{e}", show=log.DEBUG)
                print(f"\nThere Appears to be a problem with [red]{fstr}[/]: {e.__class__.__name__}")
                if typer.confirm("Do you want to edit the fomat string and try again?", abort=True):
                    fstr = _lldp_rename_get_fstr()

    print(f"[bright_green]Resulting AP names based on '{fstr}':")
    if len(name_list) <= 6:
        typer.echo("\n".join(name_list))
    else:
        # long list: show the first and last 3 names
        typer.echo("\n".join(
            [
                *name_list[0:3],
                " ...",
                *name_list[-3:]
            ]
        )
        )
    if typer.confirm("Proceed with AP Rename?", abort=True):
        return cli.central.batch_request(req_list)
def sort_result_keys(data: List[dict], order: List[str] = None) -> List[dict]:
    """Order the keys of each result dict (and concat ip/mask into one field).

    Args:
        data: Result record(s); dict or list of dicts.
        order: Explicit key order to move to the front; when None a default
            priority list is used.

    Returns:
        List[dict]: records with a consistent key order; keys absent from a
        record are filled with None.
    """
    data = utils.listify(data)
    all_keys = list(set([ik for k in data for ik in k.keys()]))
    ip_word = "ipv4" if "ipv4" in all_keys else "ip_address"
    mask_word = "ipv4_mask" if "ipv4_mask" in all_keys else "subnet_mask"

    # concat ip_address & subnet_mask fields into single ip field ip/mask
    if ip_word in all_keys and mask_word in all_keys:
        for inner in data:
            # FIX: use .get() — all_keys is the union across rows, so an
            # individual row may lack these keys (previously KeyError).
            if inner.get(ip_word) and inner.get(mask_word):
                mask = ipaddress.IPv4Network(
                    (inner[ip_word], inner[mask_word]), strict=False).prefixlen
                inner[ip_word] = f"{inner[ip_word]}/{mask}"
                del inner[mask_word]

    if order:
        to_front = order
    else:
        to_front = [
            "vlan_id",
            "name",
            "status",
            "type",
            "model",
            'mode',
            "vlan_desc",
            "id",  # pvid for VLAN output
            "ip",
            "ip_address",
            "ipaddress",
            "ipv4",
            "subnet_mask",
            "ipv4_mask",  # FIX: missing comma fused this with "serial" into a dead key
            "serial",
            "macaddr",
            "mac",
            "ap_deployment_mode",
            "group_name",
            "group",
            "site",
            "labels",
            "addr_mode",
            "admin_mode",
            "oper_mode",
            "untagged_ports",
            "tagged_ports",
            "is_management_vlan",
            "is_jumbo_enabled",
            "is_voice_enabled",
            "is_igmp_enabled",
            "uptime",
            'reboot_reason',
            'cpu_utilization',
            'mem_total',
            'mem_free',
            'firmware_version',
            'version',
            'firmware_backup_version',
            'oper_state_reason',
        ]

    to_front = [i for i in to_front if i in all_keys]
    # move the priority keys to the front, preserving their relative order
    for tf in to_front[::-1]:
        all_keys.insert(0, all_keys.pop(all_keys.index(tf)))

    return [{k: d.get(k) for k in all_keys} for d in data]
def get_clients(data: List[dict], verbose: bool = False, cache: callable = None, filters: List[str] = None, **kwargs) -> list:
    """Remove all columns that are NA for all clients in the list"""
    clients = utils.listify(data)
    clients = [
        _client_concat_associated_dev(c, verbose=verbose, cache=cache, **kwargs)
        for c in clients
    ]

    if verbose:
        # verbose: keep every key seen on any client, minus the strip list
        drop_keys = constants.CLIENT_STRIP_KEYS_VERBOSE
        if clients and all(isinstance(c, dict) for c in clients):
            every_key = set(k for c in clients for k in c)
            clients = [
                dict(short_value(k, c.get(k)) for k in every_key if k not in drop_keys)
                for c in clients
            ]
    else:
        # terse: fixed, ordered subset of columns
        ordered_keys = [
            "name",
            "macaddr",
            "vlan",
            "ip_address",
            "user_role",
            "network",
            "connection",
            "connected device",
            "gateway",
            "site",
            "group_name",
            "last_connection_time",
        ]
        if clients and all(isinstance(c, dict) for c in clients):
            cleaned = []
            for c in clients:
                pairs = []
                for k in ordered_keys:
                    val = c.get(k)
                    # wired clients report network "NA"; show the wired port instead
                    if k == "network" and val == "NA":
                        val = f"wired ({c.get('interface_port', '?')})"
                    pairs.append(short_value(k, val))
                cleaned.append(dict(pairs))
            clients = cleaned

    if filters:
        _filter = "~|~".join(filters)
        clients = [
            c for c in clients
            if c["connected device"].lower() in _filter.lower()
        ]

    return strip_no_value(clients)
def _display_results(
    self,
    data: Union[List[dict], List[str], dict, None] = None,
    tablefmt: str = "rich",
    title: str = None,
    caption: str = None,
    pager: bool = False,
    outfile: Path = None,
    sort_by: str = None,
    reverse: bool = False,
    stash: bool = True,
    pad: int = None,
    set_width_cols: dict = None,
    full_cols: Union[List[str], str] = [],
    fold_cols: Union[List[str], str] = [],
    cleaner: callable = None,
    **cleaner_kwargs,
):
    """Clean, sort, format and emit output data (terminal and optionally file).

    Args:
        data: Rows to display; warns and does nothing when falsy.
        tablefmt: Output format passed to utils.output (forced to "json" when
            raw output is requested with a tabular format).
        title: Table title (rich format only).
        caption: Table caption; echoed separately for some cleaners.
        pager: Page output when it exceeds the terminal height.
        outfile: If provided, also write the output to this file.
        sort_by: Column to sort on; "_" is retried as " " to match display names.
        reverse: Reverse row order after sorting.
        stash: Persist the output kwargs so `show last` can re-display.
        pad: Unused here; accepted for signature compatibility.
        set_width_cols / full_cols / fold_cols: column width hints for utils.output.
        cleaner: Cleaner function applied to data before formatting.
        **cleaner_kwargs: Passed through to cleaner.
    """
    if data:
        data = utils.listify(data)

        if cleaner and not self.raw_out:
            data = cleaner(data, **cleaner_kwargs)
            data = utils.listify(data)

        if sort_by and all(isinstance(d, dict) for d in data):
            # cleaners rename snake_case keys to "words with spaces";
            # retry the sort key with spaces before giving up
            if sort_by not in data[0] and sort_by.replace("_", " ") in data[0]:
                sort_by = sort_by.replace("_", " ")

            if not all([True if sort_by in d else False for d in data]):
                print(
                    f":x: [dark_orange3]Error: [cyan]{sort_by}[reset] does not appear to be a valid field"
                )
                print("Valid Fields:\n----------\n{}\n----------".format(
                    "\n".join(data[0].keys())))
            else:
                try:
                    # determine the column type from the first non-None value
                    # so "-" placeholders can sort as 0 / "" appropriately
                    type_ = str
                    for d in data:
                        if d[sort_by] is not None:
                            type_ = type(d[sort_by])
                            break
                    data = sorted(data, key=lambda d: d[sort_by] if d[sort_by] != "-" else 0 or 0 if type_ == int else "")
                except TypeError as e:
                    print(
                        f":x: [dark_orange3]Warning:[reset] Unable to sort by [cyan]{sort_by}.\n {e.__class__.__name__}: {e} "
                    )

        if reverse:
            data = data[::-1]

        if self.raw_out and tablefmt in ["simple", "rich"]:
            tablefmt = "json"

        # TODO make sure "account" is not valid then remove from list below
        if config.account == "account":
            log.warning("DEV NOTE account is 'account'", show=True)

        kwargs = {
            "outdata": data,
            "tablefmt": tablefmt,
            "title": title,
            "caption": caption,
            "account": None if config.account in ["central_info", "default", "account"] else config.account,
            "config": config,
            "set_width_cols": set_width_cols,
            "full_cols": full_cols,
            "fold_cols": fold_cols,
        }
        outdata = utils.output(**kwargs)
        if stash:
            # persist everything but the (non-serializable) config object
            config.last_command_file.write_text(
                json.dumps(
                    {k: v for k, v in kwargs.items() if k != "config"}))

        # page only when paging is enabled and output exceeds terminal height
        typer.echo_via_pager(outdata) if pager and tty and len(
            outdata) > tty.rows else typer.echo(outdata)

        if "Limit:" not in outdata and caption is not None and cleaner and cleaner.__name__ != "parse_caas_response":
            print(caption)

        if outfile and outdata:
            self.write_file(outfile, outdata.file)
    else:
        log.warning(f"No data passed to _display_output {title} {caption}")
def do_lldp_rename(fstr: str, **kwargs) -> Response:
    """Rename Up APs using a format string populated from LLDP neighbor info.

    ``fstr`` may contain ``%<spec>`` tokens where <spec> is one of:
    ``h`` (neighborHostName), ``m`` (mac), ``p`` (remotePort), ``M`` (model),
    ``S`` (site).  A spec may be followed by:
      * ``[n:m]`` slice syntax to take characters n..m of the value,
        or ``[-n]`` for the trailing n characters,
      * ``<sep><i>`` to split the value on <sep> and take segment i,
      * ``((a,b))`` to replace character a with b in the value.
    Any other character in ``fstr`` is copied into the name verbatim.

    Args:
        fstr: Format string describing how each AP name is built.
        **kwargs: Filters forwarded to central.get_devices (e.g. group/site).

    Returns:
        Response: Combined response from the batched update_ap_settings calls
        (only reached after user confirmation).
    """
    br = cli.central.BatchRequest
    # Only Up APs are candidates -- LLDP data requires the AP to be online.
    resp = cli.central.request(cli.central.get_devices, "aps", status="Up", **kwargs)

    if not resp:
        # API failure: display_results exits the CLI (exit_on_fail)
        cli.display_results(resp, exit_on_fail=True)
    elif not resp.output:
        # Success but nothing matched the provided filters; convert to a failure.
        filters = ", ".join([f"{k}: {v}" for k, v in kwargs.items()])
        resp.output = {
            "description": "API called was successful but returned no results.",
            "error": f"No Up APs found matching provided filters ({filters})."
        }
        resp.ok = False
        cli.display_results(resp, exit_on_fail=True)

    _all_aps = utils.listify(resp.output)
    _keys = ["name", "mac", "model"]
    # serial -> subset of device fields consumed by the format specs
    ap_dict = {d["serial"]: {k: d[k] for k in d if k in _keys} for d in _all_aps}
    # map of %<char> spec -> key in the per-AP data dict
    fstr_to_key = {
        "h": "neighborHostName",
        "m": "mac",
        "p": "remotePort",
        "M": "model",
        "S": "site",
    }
    req_list, name_list, shown_promt = [], [], False
    if ap_dict:
        # Gather LLDP neighbor info for every AP in a single batch.
        lldp_reqs = [br(cli.central.get_ap_lldp_neighbor, ap) for ap in ap_dict]
        lldp_resp = cli.central.batch_request(lldp_reqs)
        if not all(r.ok for r in lldp_resp):
            log.error("Error occured while gathering lldp neighbor info", show=True)
            cli.display_results(lldp_resp, exit_on_fail=True)

        # last entry of each response output holds the LLDP neighbor record
        lldp_dict = {d.output[-1]["serial"]: {k: v for k, v in d.output[-1].items()} for d in lldp_resp}
        ap_dict = {
            ser: {
                **val,
                "neighborHostName": lldp_dict[ser]["neighborHostName"],
                "remotePort": lldp_dict[ser]["remotePort"],
                # site resolution needs a cache lookup; only do it when %S is used
                "site": None if "%S" not in fstr else cli.cache.get_dev_identifier(lldp_dict[ser]["neighborSerial"])
            }
            for ser, val in ap_dict.items()
        }

    for ap in ap_dict:
        ap_dict[ap]["mac"] = utils.Mac(ap_dict[ap]["mac"]).clean
        # _lldp = cli.central.request(cli.central.get_ap_lldp_neighbor, ap)
        # if _lldp:
        #     ap_dict[ap]["neighborHostName"] = _lldp.output[-1]["neighborHostName"]
        #     ap_dict[ap]["remotePort"] = _lldp.output[-1]["remotePort"]

        # Retry loop: on a parse error the user may edit fstr and try again.
        while True:
            st = 0   # index parsing resumes from (chars consumed by a spec)
            x = ''   # the AP name being built
            try:
                for idx, c in enumerate(fstr):
                    if not idx >= st:
                        continue  # skip characters already consumed by a previous spec
                    if c == '%':
                        if fstr[idx + 1] not in fstr_to_key.keys():
                            _e1 = typer.style(
                                f"Invalid source specifier ({fstr[idx + 1]}) in format string {fstr}: ",
                                fg="red"
                            )
                            _e2 = "Valid values:\n{}".format(
                                ", ".join(fstr_to_key.keys())
                            )
                            typer.echo(f"{_e1}\n{_e2}")
                            raise KeyError(f"{fstr[idx + 1]} is not valid")
                        _src = ap_dict[ap][fstr_to_key[fstr[idx + 1]]]
                        if fstr[idx + 2] != "[":
                            if fstr[idx + 2] == "%" or fstr[idx + 3] == "%":
                                # bare spec (e.g. %h) -> append the whole value
                                x = f'{x}{_src}'
                                st = idx + 2
                            elif fstr[idx + 2:idx + 4] == "((":  # +3 should also be (
                                # %h((a,b)) -> replace character a with b in the value
                                _from = fstr[idx + 4]
                                _to = fstr[idx + 6]
                                if not fstr[idx + 5] == ",":
                                    typer.secho(
                                        f"expected a comma at character {idx + 1 + 5} but found {fstr[idx + 5]}\n"
                                        "will try to proceed.", fg="bright_Red"
                                    )
                                if not fstr[idx + 7:idx + 9] == "))":
                                    typer.secho(
                                        f"expected a )) at characters {idx + 1 + 7}-{idx + 1 + 8} "
                                        f"but found {fstr[idx + 7]}{fstr[idx + 8]}\n"
                                        "will try to proceed.", fg="bright_Red"
                                    )
                                x = f'{x}{_src.replace(_from, _to)}'
                                st = idx + 9
                            else:
                                # %h<sep><i> -> split the value on <sep>, take segment i
                                try:
                                    fi = _get_full_int(fstr[idx + 3:])
                                    x = f'{x}{_src.split(fstr[idx + 2])[fi.i]}'
                                    st = idx + 3 + len(fi)
                                except IndexError:
                                    _e1 = ", ".join(_src.split(fstr[idx + 2]))
                                    _e2 = len(_src.split(fstr[idx + 2]))
                                    typer.secho(
                                        f"\nCan't use segment {fi.o} of '{_e1}'\n"
                                        f" It only has {_e2} segments.\n",
                                        fg="red"
                                    )
                                    raise
                        else:  # +2 is '['
                            if fstr[idx + 3] == "-":
                                # %h[-n] -> take the trailing n characters of the value
                                try:
                                    fi = _get_full_int(fstr[idx + 4:])
                                    x = f'{x}{"".join(_src[-fi.o:])}'
                                    st = idx + 4 + len(fi) + 1  # +1 for closing ']'
                                except IndexError:
                                    typer.secho(
                                        f"Can't extract the final {fi.o} characters from {_src}"
                                        f"It's only {len(_src)} characters."
                                    )
                                    raise
                            else:  # +2 is '[' +3: should be int [1:4]
                                fi = _get_full_int(fstr[idx + 3:])
                                fi2 = _get_full_int(fstr[idx + 3 + len(fi) + 1:])  # +1 for expected ':'
                                # Requested slice extends past the end of the value:
                                # show what the truncated result would be and confirm once.
                                if len(_src[slice(fi.i, fi2.o)]) < fi2.o - fi.i:
                                    _e1 = typer.style(
                                        f"\n{fstr} wants to take characters "
                                        f"\n{fi.o} through {fi2.o}"
                                        f"\n\"from {_src}\" (slice ends at character {len(_src[slice(fi.i, fi2.o)])}).",
                                        fg="red"
                                    )
                                    if not shown_promt and typer.confirm(
                                        f"{_e1}"
                                        f"\n\nResult will be \""
                                        f"{typer.style(''.join(_src[slice(fi.i, fi2.o)]), fg='bright_green')}\""
                                        " for this segment."
                                        "\nOK to continue?"
                                    ):
                                        shown_promt = True
                                        x = f'{x}{"".join(_src[slice(fi.i, fi2.o)])}'
                                        st = idx + 3 + len(fi) + len(fi2) + 2  # +2 for : and ]
                                    else:
                                        raise typer.Abort()
                                # NOTE(review): when the slice fits entirely within the value
                                # nothing is appended for this spec, and once shown_promt is True
                                # a later overrun Aborts without prompting -- confirm intent.
                    else:
                        # literal character: copy into the name verbatim
                        x = f'{x}{c}'
                req_list += [cli.central.BatchRequest(cli.central.update_ap_settings, (ap, x))]
                name_list += [f" {x}"]
                break
            except typer.Abort:
                # user declined a confirmation: let them supply a new format string
                fstr = _lldp_rename_get_fstr()
            except Exception as e:
                log.exception(f"LLDP rename exception while parsing {fstr}\n{e}", show=log.DEBUG)
                typer.secho(f"\nThere Appears to be a problem with {fstr}: {e.__class__.__name__}", fg="red")
                # NOTE(review): abort="True" passes a truthy str where typer expects a bool;
                # behaves like abort=True but should be confirmed/corrected upstream.
                if typer.confirm("Do you want to edit the fomat string and try again?", abort="True"):
                    fstr = _lldp_rename_get_fstr()

    typer.secho(f"Resulting AP names based on '{fstr}':", fg="bright_green")
    if len(name_list) <= 6:
        typer.echo("\n".join(name_list))
    else:
        # long lists: preview only the first 3 and last 3 resulting names
        typer.echo("\n".join(
            [
                *name_list[0:3],
                "...",
                *name_list[-3:]
            ]
        )
        )

    if typer.confirm("Proceed with AP Rename?", abort=True):
        return cli.central.batch_request(req_list)
def send_cmds(
    kw1: constants.SendCmdArgs = typer.Argument(
        ...,
    ),
    nodes: str = typer.Argument(
        None,
        autocompletion=cache.send_cmds_completion,
        metavar=iden.group_or_dev_or_site,
    ),
    kw2: str = typer.Argument(
        None,
        autocompletion=cache.send_cmds_completion,
    ),
    commands: List[str] = typer.Argument(None, callback=cli.send_cmds_node_callback),
    cmd_file: Path = typer.Option(None, help="Path to file containing commands (1 per line) to be sent to device", exists=True),
    all: bool = typer.Option(False, "-A", help="Send command(s) to all gateways (device level update) when group is provided"),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option("central_info", envvar="ARUBACLI_ACCOUNT", help="The Aruba Central Account to use (must be defined in the config)", callback=cli.account_name_callback),
) -> None:
    """Send CLI commands to gateways via the caas API.

    Targets are resolved from ``kw1``/``nodes``:
      * ``group``: group-level config, or every gateway in the group with -A.
      * ``site``: every gateway in the site.
      * ``file``: targets parsed from an import file (plain text list, or a
        dict with a 'devices'/'gateways', 'groups', or 'sites' key; the file
        may also carry the commands under 'cmds'/'commands').
      * ``device``: a single gateway.

    Commands come from the command line, ``cmd_file``, or the import file --
    exactly one source must be used.  Exits with code 1 on input errors;
    prompts for confirmation before sending unless -Y/-y was given.
    """
    console = Console(emoji=False)  # NOTE(review): constructed but unused in this body
    yes = yes if yes else yes_
    commands = commands or []

    if kw1 == "group":
        if all:
            # -A: expand the group to its member gateways (device-level update)
            g = cache.get_group_identifier(nodes)
            nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["group"] == g.name]
            action = f"all devices in {g.name} group."
        else:
            nodes = cache.get_group_identifier(nodes)
            action = f"group level gateway config for {nodes.name} group."
    elif kw1 == "site":
        # TODO(review): uses get_group_identifier to resolve a *site* name --
        # verify whether a get_site_identifier exists and should be used here.
        s = cache.get_group_identifier(nodes)
        nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["site"] == s.name]
        action = f"all devices in site: {s.name}"
    elif kw1 == "file":
        dev_file = Path(nodes)
        file_data = config.get_file_data(dev_file, text_ok=True)
        if not file_data:
            print(f"No data parsed from file {dev_file.name}.")
            raise typer.Exit(1)

        if isinstance(file_data, list):
            # plain text file: one device/group/site identifier per line
            nodes = [cache.get_identifier(d.strip(), ["dev", "group", "site"], device_type="gw") for d in file_data]
        else:
            devices = file_data.get("devices", file_data.get("gateways"))
            if devices:
                # BUG FIX: iterate the resolved `devices` list.  Previously this
                # iterated file_data["devices"], which raised KeyError whenever
                # the import file used the "gateways" key instead.
                nodes = [cache.get_identifier(d.strip(), ["dev", "group", "site"], device_type="gw") for d in devices]
            elif "groups" in file_data:
                nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["group"] in file_data["groups"]]
            elif "sites" in file_data:
                nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["site"] in file_data["sites"]]
            else:
                print(f"Expected 'gateways', 'groups', or 'sites' key in {dev_file.name}.")
                raise typer.Exit(1)

            # commands may also be embedded in the import file
            if "cmds" in file_data or "commands" in file_data:
                if commands:
                    print("Providing commands on the command line and in the import file is a strange thing to do.")
                    raise typer.Exit(1)
                commands = file_data.get("cmds", file_data.get("commands"))
    elif kw1 == "device":
        if not isinstance(nodes, str):
            print(f"nodes is of type {type(nodes)} this is unexpected.")
        nodes = [cache.get_identifier(nodes, ["dev"], "gw")]

    if cmd_file:
        if commands:
            print("Providing commands on the command line and in the import file is a strange thing to do.")
            raise typer.Exit(1)
        else:
            commands = [line.rstrip() for line in cmd_file.read_text().splitlines()]

    if not commands:
        print("Error No commands provided")
        raise typer.Exit(1)

    if yes or typer.confirm("\nProceed?", abort=True):
        caasapi = caas.CaasAPI(central=cli.central)
        # group targets are addressed by name; devices by MAC
        _reqs = [
            cli.central.BatchRequest(
                caasapi.send_commands,
                n.name if not n.is_dev else n.mac,
                cli_cmds=commands
            )
            for n in utils.listify(nodes)
        ]
        batch_res = cli.central.batch_request(_reqs)
        cli.display_results(batch_res, cleaner=cleaner.parse_caas_response)