def check_fresh(
    self,
    refresh: bool = False,
    site_db: bool = False,
    dev_db: bool = False,
    template_db: bool = False,
    group_db: bool = False,
) -> None:
    """Refresh the local identifier-mapping cache when forced or missing.

    Passing any individual *_db flag implies a refresh.  The cache is also
    rebuilt when the cache file does not exist or is empty.
    """
    # Any explicit per-db flag forces the refresh path.
    if True in [site_db, dev_db, group_db, template_db]:
        refresh = True

    # Short-circuit keeps fs calls away when refresh is already True.
    # or time.time() - config.cache_file.stat().st_mtime > 7200:
    if refresh or not config.cache_file.is_file() or config.cache_file.stat().st_size <= 0:
        t_start = time.time()
        print(typer.style("-- Refreshing Identifier mapping Cache --", fg="cyan"), end="")
        db_res = asyncio.run(
            self._check_fresh(dev_db=dev_db, site_db=site_db, template_db=template_db, group_db=group_db)
        )
        if db_res and False in db_res:
            log.error("TinyDB returned an error during db update")

        log.info(f"Cache Refreshed in {round(time.time() - t_start, 2)} seconds")
        typer.secho(f"-- Cache Refresh Completed in {round(time.time() - t_start, 2)} sec --", fg="cyan")
def get_template_identifier(
    self,
    query_str: str,
    ret_field: str = "name",
    group: str = None,
    retry: bool = True,
    multi_ok: bool = False,
) -> CentralObject:
    """Allows case insensitive template match by template name

    Args:
        query_str: User-provided template identifier (name, possibly partial).
        ret_field: Field the caller wants (used only in error messages here).
        group: If provided, narrows multiple matches to templates in this group.
        retry: Refresh the template cache and retry once when nothing matches.
        multi_ok: Passed through to handle_multi_match.

    Returns:
        CentralObject wrapping the matched template record(s).

    Raises:
        typer.Abort: When no match is found and retry is True.
    """
    match = None
    for _ in range(0, 2 if retry else 1):
        # exact name match OR case-insensitive name match
        match = self.TemplateDB.search(
            (self.Q.name == query_str)
            | self.Q.name.test(lambda v: v.lower() == query_str.lower()))
        # case-insensitive match with "_" swapped for "-"
        if not match:
            match = self.TemplateDB.search(
                self.Q.name.test(lambda v: v.lower() == query_str.lower().replace("_", "-")))
        # last chance: case-insensitive prefix match
        if not match:
            match = self.TemplateDB.search(
                self.Q.name.test(lambda v: v.lower().startswith(query_str.lower())))

        if retry and not match and self.central.get_all_templates not in self.updated:
            # bugfix: message previously read "Cachce" (typo); later revision uses "Cache"
            typer.secho(f"No Match Found for {query_str}, Updating template Cache", fg="red")
            self.check_fresh(refresh=True, template_db=True)
        if match:
            break

    if match:
        if len(match) > 1:
            if group:
                # bugfix/cleanup: previously rebuilt each dict via {k: d[k] for k in d}
                # for no reason; a plain filter keeps the same records.
                match = [d for d in match if d["group"].lower() == group.lower()]
            if len(match) > 1:
                match = self.handle_multi_match(
                    match,
                    query_str=query_str,
                    query_type="template",
                    multi_ok=multi_ok,
                )
        return CentralObject("template", match)
    elif retry:
        log.error(
            f"Unable to gather template {ret_field} from provided identifier {query_str}",
            show=True)
        raise typer.Abort()
    else:
        log.warning(
            f"Unable to gather template {ret_field} from provided identifier {query_str}",
            show=False)
async def show_config(self, group: str, dev_mac: str = None) -> Response:
    """Fetch the committed configuration for a group via the caasapi showcommand.

    When a device MAC is supplied it is validated and appended to the URL so
    the result is scoped to that device; an invalid MAC aborts the CLI.
    """
    target = f"/caasapi/v1/showcommand/object/committed?group_name={group}"

    if dev_mac:
        mac = utils.Mac(dev_mac)
        if not mac:
            # Hard error for the user: the supplied MAC could not be parsed.
            log.error(
                f"{dev_mac} does not appear to be a valid MAC address.",
                show=True)
            raise typer.Exit(1)
        target = f"{target}/{mac.url}"

    return await self.central.get(target)
def __init__(
    self,
    response: ClientResponse = None,
    url: Union[URL, str] = "",
    ok: bool = None,
    error: str = None,
    output: Any = {},
    raw: Any = {},
    status_code: int = None,
    elapsed: Union[int, float] = 0,
    rl_str: str = None,
):
    """Normalize an API result into a common Response object.

    Two construction modes:
      * with a live aiohttp ``ClientResponse`` -> ok/url/error/status/method
        are mirrored from it and the call is logged.
      * without one (pre-built output or a transport error) -> attributes come
        from the keyword arguments.

    NOTE(review): ``output``/``raw`` use mutable default args ({}); safe only
    if never mutated in place -- verify callers.
    """
    # Explicit rate-limit string wins; otherwise derive from the response.
    self.rl = rl_str or RateLimit(response)
    self._response = response
    self.output = output
    self.raw = raw
    self.ok = ok
    self.method = ""
    if response:
        # Live response: mirror its attributes.
        self.ok = response.ok
        self.url = response.url
        self.error = response.reason
        self.status = response.status
        self.method = response.method
        _offset_str = ""
        # Include paging info in the log line when beyond the first page.
        if "offset" in response.url.query and int(response.url.query['offset']) > 0:
            _offset_str = f" offset: {response.url.query['offset']} limit: {response.url.query.get('limit', '?')}"
        _log_msg = f"[{response.reason}] {response.method}:{response.url.path}{_offset_str} Elapsed: {elapsed:.2f}"
        if not self.ok:
            # Failed call: surface the reason as output if none was provided.
            self.output = self.output or self.error
            log.error(_log_msg)
        else:
            log.info(_log_msg)
    else:
        # Constructed response (no HTTP exchange, or the call itself raised).
        if error:
            self.ok = ok or False
            self.error = error
            self.output = output or error
        elif output or isinstance(output, (list, dict)):  # empty list or dict, when used as constructor still ok
            # NOTE(review): an explicit ok=False combined with truthy output
            # becomes True here (ok or True) -- confirm intended.
            self.ok = ok or True
            self.error = error or "OK"
        self.url = URL(url)
        # 299/418 are synthetic codes for constructed success/failure.
        self.status = status_code or 299 if self.ok else 418
    # Collapse Central's structured error payload into a readable string.
    # NOTE(review): membership tests run before the isinstance check; relies on
    # "error" not appearing in non-dict outputs being harmless.
    if self.output and "error" in self.output and "error_description" in self.output and isinstance(self.output, dict):
        self.output = f"{self.output['error']}: {self.output['error_description']}"
def get_group_identifier(
    self,
    query_str: str,
    ret_field: str = "name",
    retry: bool = True,
    multi_ok: bool = False,
) -> CentralObject:
    """Allows Case insensitive group match

    Args:
        query_str: User-provided group identifier.
        ret_field: Field the caller wants (used only in error messages here).
        retry: When True and nothing matches, refresh the group cache and retry.
        multi_ok: Passed through to handle_multi_match.

    Returns:
        CentralObject wrapping the matched group record(s).

    Raises:
        typer.Abort: When no match is found and retry is True.
    """
    match = None
    for _ in range(0, 2):
        # exact name match OR case-insensitive name match
        match = self.GroupDB.search(
            (self.Q.name == query_str)
            | self.Q.name.test(lambda v: v.lower() == query_str.lower()))

        if retry and not match and self.central.get_all_groups not in self.updated:
            # bugfix: message previously read "Cachce" (typo)
            typer.secho(f"No Match Found for {query_str}, Updating group Cache",
                        fg="red")
            self.check_fresh(refresh=True, group_db=True)
        if match:
            break

    if match:
        if len(match) > 1:
            match = self.handle_multi_match(match,
                                            query_str=query_str,
                                            query_type="group",
                                            multi_ok=multi_ok)
        return CentralObject("group", match)
    elif retry:
        log.error(
            f"Unable to gather group {ret_field} from provided identifier {query_str}",
            show=True)
        valid_groups = "\n".join(self.group_names)
        typer.secho(f"{query_str} appears to be invalid", fg="red")
        typer.secho(f"Valid Groups:\n--\n{valid_groups}\n--\n", fg="cyan")
        raise typer.Abort()
    else:
        # bugfix: message previously said "template" -- copy/paste from the
        # template identifier method; this is the group lookup.
        log.error(
            f"Unable to gather group {ret_field} from provided identifier {query_str}",
            show=True)
def _get_lldp_dict(ap_dict: dict) -> dict:
    """Updates provided dict of APs keyed by AP serial number with lldp neighbor info
    """
    batch_req = cli.central.BatchRequest
    reqs = [batch_req(cli.central.get_ap_lldp_neighbor, serial) for serial in ap_dict]
    responses = cli.central.batch_request(reqs)

    # Bail out (with full per-request details) if any lldp call failed.
    if not all(r.ok for r in responses):
        log.error("Error occured while gathering lldp neighbor info",
                  show=True)
        cli.display_results(responses, exit_on_fail=True)

    # Index neighbor details by AP serial.
    # NOTE(review): uses output[-1] -- presumably the relevant neighbor entry;
    # confirm against the API response shape.
    neighbors = {r.output[-1]["serial"]: dict(r.output[-1]) for r in responses}

    return {
        serial: {
            **info,
            "neighborHostName": neighbors[serial]["neighborHostName"],
            "remotePort": neighbors[serial]["remotePort"],
        }
        for serial, info in ap_dict.items()
    }
def check_fresh(
    self,
    refresh: bool = False,
    site_db: bool = False,
    dev_db: bool = False,
    template_db: bool = False,
    group_db: bool = False,
) -> None:
    """Refresh the local identifier-mapping cache (TinyDB) when forced or missing.

    Passing any individual *_db flag implies a refresh.  The cache is also
    rebuilt when the cache file does not exist or is empty.  Spinner feedback
    reports success or which db updates failed.
    """
    if True in [site_db, dev_db, group_db, template_db]:
        refresh = True

    if refresh or not config.cache_file.is_file() or not config.cache_file.stat().st_size > 0:
        # or time.time() - config.cache_file.stat().st_mtime > 7200:
        start = time.time()
        print(typer.style("-- Refreshing Identifier mapping Cache --", fg="cyan"), end="")
        db_res = asyncio.run(
            self._check_fresh(dev_db=dev_db,
                              site_db=site_db,
                              template_db=template_db,
                              group_db=group_db))
        if db_res and False in db_res:
            res_map = ["dev_db", "site_db", "template_db", "group_db"]
            # bugfix: was db_res(idx) which calls the result *list* ->
            # TypeError: 'list' object is not callable.  Index it instead.
            # NOTE(review): assumes _check_fresh returns results in
            # dev, site, template, group order -- confirm.
            res_map = ", ".join(
                [db for idx, db in enumerate(res_map) if not db_res[idx]])
            log.error(
                f"TinyDB returned error ({res_map}) during db update")
            self.central.spinner.fail(
                f"Cache Refresh Returned an error updating ({res_map})")
        else:
            self.central.spinner.succeed(
                f"Cache Refresh Completed in {round(time.time() - start, 2)} sec"
            )
        log.info(
            f"Cache Refreshed in {round(time.time() - start, 2)} seconds")
def get_site_identifier(
    self,
    query_str: Union[str, List[str], tuple],
    ret_field: str = "id",
    retry: bool = True,
    multi_ok: bool = False,
) -> CentralObject:
    """Resolve a user-provided identifier to a cached site record.

    Matching is attempted in order: exact field match (name/id/zipcode/
    address/city/state), case-insensitive name/address, name with -/_ swapped,
    then case-insensitive name prefix.  On a miss with retry=True the site
    cache is refreshed once and the search repeated.

    Raises:
        typer.Abort: No match found and retry is True.
        (Implicitly returns None when retry is False and nothing matches.)
    """
    # Multi-word identifiers may arrive as a list/tuple of CLI args.
    if isinstance(query_str, (list, tuple)):
        query_str = " ".join(query_str)

    match = None
    for _ in range(0, 2 if retry else 1):
        # try exact site match
        match = self.SiteDB.search(
            (self.Q.name == query_str)
            | (self.Q.id.test(lambda v: str(v) == query_str))
            | (self.Q.zipcode == query_str)
            | (self.Q.address == query_str)
            | (self.Q.city == query_str)
            | (self.Q.state == query_str))

        # retry with case insensitive name & address match if no match with original query
        if not match:
            match = self.SiteDB.search(
                (self.Q.name.test(lambda v: v.lower() == query_str.lower()))
                | self.Q.address.test(lambda v: v.lower().replace(" ", "") == query_str.lower().replace(" ", "")))

        # retry name match swapping - for _ and _ for -
        if not match:
            if "-" in query_str:
                match = self.SiteDB.search(
                    self.Q.name.test(lambda v: v.lower() == query_str.lower().replace("-", "_")))
            elif "_" in query_str:
                match = self.SiteDB.search(
                    self.Q.name.test(lambda v: v.lower() == query_str.lower().replace("_", "-")))

        # Last Chance try to match name if it startswith provided value
        if not match:
            match = self.SiteDB.search(
                self.Q.name.test(
                    lambda v: v.lower().startswith(query_str.lower())))

        # Cache may be stale: refresh once, then the outer loop retries.
        # NOTE(review): "Cachce" typo in the user-facing message below.
        if retry and not match and self.central.get_all_sites not in self.updated:
            typer.secho(
                f"No Match Found for {query_str}, Updating Site Cachce",
                fg="red")
            self.check_fresh(refresh=True, site_db=True)
        if match:
            break

    if match:
        # Multiple hits are disambiguated (or allowed) by handle_multi_match.
        if len(match) > 1:
            match = self.handle_multi_match(match,
                                            query_str=query_str,
                                            query_type="site",
                                            multi_ok=multi_ok)
        # return match[0].get(ret_field)
        return CentralObject("site", match)
    elif retry:
        log.error(
            f"Unable to gather site {ret_field} from provided identifier {query_str}",
            show=True)
        raise typer.Abort()
def get_dev_identifier(
    self,
    query_str: Union[str, List[str], tuple],
    dev_type: str = None,
    ret_field: str = "serial",
    retry: bool = True,
    multi_ok: bool = True,
) -> CentralObject:
    """Resolve a user-provided identifier (name/ip/mac/serial) to a cached device.

    Match order: exact (name, ip sans prefix-len, mac, serial), then
    case-insensitive equivalents, then name with -/_ swapped, then prefix
    match on name/serial/mac.  On a miss with retry=True the device cache is
    refreshed once and the search repeated.  If dev_type is given, matches of
    other device types are filtered out after the search.
    """
    # TODO dev_type currently not passed in or handled identifier for show switches would also
    # try to match APs ... & (self.Q.type == dev_type)
    # TODO refactor to single test function usable by all identifier methods 1 search with a more involved test
    if isinstance(query_str, (list, tuple)):
        query_str = " ".join(query_str)

    match = None
    for _ in range(0, 2 if retry else 1):
        # Try exact match
        match = self.DevDB.search(
            (self.Q.name == query_str)
            | (self.Q.ip.test(lambda v: v.split("/")[0] == query_str))
            | (self.Q.mac == utils.Mac(query_str).cols)
            | (self.Q.serial == query_str))

        # retry with case insensitive name match if no match with original query
        if not match:
            match = self.DevDB.search(
                (self.Q.name.test(lambda v: v.lower() == query_str.lower()))
                | self.Q.mac.test(lambda v: v.lower() == utils.Mac(query_str).cols.lower())
                | self.Q.serial.test(lambda v: v.lower() == query_str.lower()))

        # retry name match swapping - for _ and _ for -
        if not match:
            if "-" in query_str:
                match = self.DevDB.search(
                    self.Q.name.test(lambda v: v.lower() == query_str.lower().replace("-", "_")))
            elif "_" in query_str:
                match = self.DevDB.search(
                    self.Q.name.test(lambda v: v.lower() == query_str.lower().replace("_", "-")))

        # Last Chance try to match name if it startswith provided value
        if not match:
            match = self.DevDB.search(
                self.Q.name.test(lambda v: v.lower().startswith(query_str.lower()))
                | self.Q.serial.test(lambda v: v.lower().startswith(query_str.lower()))
                | self.Q.mac.test(lambda v: v.lower().startswith(utils.Mac(query_str).cols.lower())))

        # Cache may be stale: refresh once, then the outer loop retries.
        # NOTE(review): "Cachce" typo in the user-facing message below.
        if retry and not match and self.central.get_all_devicesv2 not in self.updated:
            typer.secho(
                f"No Match Found for {query_str}, Updating Device Cachce",
                fg="red")
            self.check_fresh(refresh=True, dev_db=True)
        if match:
            break

    all_match = None
    if dev_type:
        # Keep the unfiltered list so the error path can explain the exclusion.
        all_match = match
        # NOTE(review): this substring test ("ap" in "aps"[:2] style) looks
        # fragile for overlapping type prefixes -- confirm intended semantics.
        match = [
            d for d in match if d["type"].lower() in "".join(
                dev_type[0:len(d["type"])]).lower()
        ]

    if match:
        if len(match) > 1:
            match = self.handle_multi_match(match,
                                            query_str=query_str,
                                            multi_ok=multi_ok)
        return CentralObject("dev", match)
    elif retry:
        log.error(
            f"Unable to gather device {ret_field} from provided identifier {query_str}",
            show=True)
        if all_match:
            all_match = all_match[-1]
            log.error(
                f"The Following device matched {all_match.get('name')} excluded as {all_match.get('type')} != {dev_type}",
                show=True,
            )
        raise typer.Abort()
    # NOTE(review): when retry is False and nothing matches, falls through
    # returning None -- callers must handle a None result.
def do_lldp_rename(fstr: str, **kwargs) -> Response:
    """Rename Up APs based on a format string built from AP / LLDP neighbor fields.

    Format specifiers (from fstr_to_key below): %h neighbor hostname, %m AP mac,
    %p neighbor remote port, %M model, %S site.  A specifier may be followed by
    modifiers parsed character-by-character below: ((a,b)) char replace,
    [n:m] slice, [-n] tail, or a delimiter + index to split and take a segment.
    kwargs are passed through as filters to get_devices.
    """
    br = cli.central.BatchRequest
    resp = cli.central.request(cli.central.get_devices, "aps", status="Up", **kwargs)

    if not resp:
        cli.display_results(resp, exit_on_fail=True)
    elif not resp.output:
        # Successful call but nothing matched the provided filters.
        filters = ", ".join([f"{k}: {v}" for k, v in kwargs.items()])
        resp.output = {
            "description": "API called was successful but returned no results.",
            "error": f"No Up APs found matching provided filters ({filters})."
        }
        resp.ok = False
        cli.display_results(resp, exit_on_fail=True)

    _all_aps = utils.listify(resp.output)
    _keys = ["name", "mac", "model"]
    # ap serial -> subset of AP attributes used by the format string
    ap_dict = {d["serial"]: {k: d[k] for k in d if k in _keys} for d in _all_aps}
    fstr_to_key = {
        "h": "neighborHostName",
        "m": "mac",
        "p": "remotePort",
        "M": "model",
        "S": "site",
    }
    req_list, name_list, shown_promt = [], [], False
    if ap_dict:
        # Gather lldp neighbor info for every AP in one batch.
        lldp_reqs = [br(cli.central.get_ap_lldp_neighbor, ap) for ap in ap_dict]
        lldp_resp = cli.central.batch_request(lldp_reqs)
        if not all(r.ok for r in lldp_resp):
            log.error("Error occured while gathering lldp neighbor info", show=True)
            cli.display_results(lldp_resp, exit_on_fail=True)

        lldp_dict = {d.output[-1]["serial"]: {k: v for k, v in d.output[-1].items()} for d in lldp_resp}
        # Merge neighbor details (and site, only if %S is used) into each AP entry.
        ap_dict = {
            ser: {
                **val,
                "neighborHostName": lldp_dict[ser]["neighborHostName"],
                "remotePort": lldp_dict[ser]["remotePort"],
                "site": None if "%S" not in fstr else cli.cache.get_dev_identifier(lldp_dict[ser]["neighborSerial"])
            }
            for ser, val in ap_dict.items()
        }

        for ap in ap_dict:
            ap_dict[ap]["mac"] = utils.Mac(ap_dict[ap]["mac"]).clean
            # _lldp = cli.central.request(cli.central.get_ap_lldp_neighbor, ap)
            # if _lldp:
            #     ap_dict[ap]["neighborHostName"] = _lldp.output[-1]["neighborHostName"]
            #     ap_dict[ap]["remotePort"] = _lldp.output[-1]["remotePort"]
            # Re-parse fstr for this AP until it succeeds; on error the user
            # can supply a corrected format string and we loop again.
            while True:
                st = 0  # index of the next character not yet consumed
                x = ''  # accumulated rename result for this AP
                try:
                    for idx, c in enumerate(fstr):
                        # Skip characters already consumed by a specifier.
                        if not idx >= st:
                            continue
                        if c == '%':
                            if fstr[idx + 1] not in fstr_to_key.keys():
                                _e1 = typer.style(
                                    f"Invalid source specifier ({fstr[idx + 1]}) in format string {fstr}: ",
                                    fg="red"
                                )
                                _e2 = "Valid values:\n{}".format(
                                    ", ".join(fstr_to_key.keys())
                                )
                                typer.echo(f"{_e1}\n{_e2}")
                                raise KeyError(f"{fstr[idx + 1]} is not valid")
                            # Source value the modifiers below operate on.
                            _src = ap_dict[ap][fstr_to_key[fstr[idx + 1]]]
                            if fstr[idx + 2] != "[":
                                if fstr[idx + 2] == "%" or fstr[idx + 3] == "%":
                                    # Bare specifier: take the whole value.
                                    x = f'{x}{_src}'
                                    st = idx + 2
                                elif fstr[idx + 2:idx + 4] == "((":  # +3 should also be (
                                    # ((a,b)): replace character a with b.
                                    _from = fstr[idx + 4]
                                    _to = fstr[idx + 6]
                                    if not fstr[idx + 5] == ",":
                                        typer.secho(
                                            f"expected a comma at character {idx + 1 + 5} but found {fstr[idx + 5]}\n"
                                            "will try to proceed.", fg="bright_Red"
                                        )
                                    if not fstr[idx + 7:idx + 9] == "))":
                                        typer.secho(
                                            f"expected a )) at characters {idx + 1 + 7}-{idx + 1 + 8} "
                                            f"but found {fstr[idx + 7]}{fstr[idx + 8]}\n"
                                            "will try to proceed.", fg="bright_Red"
                                        )
                                    x = f'{x}{_src.replace(_from, _to)}'
                                    st = idx + 9
                                else:
                                    # Delimiter + index: split on fstr[idx+2], take segment.
                                    try:
                                        fi = _get_full_int(fstr[idx + 3:])
                                        x = f'{x}{_src.split(fstr[idx + 2])[fi.i]}'
                                        st = idx + 3 + len(fi)
                                    except IndexError:
                                        _e1 = ", ".join(_src.split(fstr[idx + 2]))
                                        _e2 = len(_src.split(fstr[idx + 2]))
                                        typer.secho(
                                            f"\nCan't use segment {fi.o} of '{_e1}'\n"
                                            f" It only has {_e2} segments.\n",
                                            fg="red"
                                        )
                                        raise
                            else:  # +2 is '['
                                if fstr[idx + 3] == "-":
                                    # [-n]: last n characters of the value.
                                    try:
                                        fi = _get_full_int(fstr[idx + 4:])
                                        x = f'{x}{"".join(_src[-fi.o:])}'
                                        st = idx + 4 + len(fi) + 1  # +1 for closing ']'
                                    except IndexError:
                                        typer.secho(
                                            f"Can't extract the final {fi.o} characters from {_src}"
                                            f"It's only {len(_src)} characters."
                                        )
                                        raise
                                else:  # +2 is '[' +3: should be int [1:4]
                                    # [n:m]: slice of the value.
                                    fi = _get_full_int(fstr[idx + 3:])
                                    fi2 = _get_full_int(fstr[idx + 3 + len(fi) + 1:])  # +1 for expected ':'
                                    if len(_src[slice(fi.i, fi2.o)]) < fi2.o - fi.i:
                                        # Slice extends past the value: warn and confirm once.
                                        _e1 = typer.style(
                                            f"\n{fstr} wants to take characters "
                                            f"\n{fi.o} through {fi2.o}"
                                            f"\n\"from {_src}\" (slice ends at character {len(_src[slice(fi.i, fi2.o)])}).",
                                            fg="red"
                                        )
                                        if not shown_promt and typer.confirm(
                                            f"{_e1}"
                                            f"\n\nResult will be \""
                                            f"{typer.style(''.join(_src[slice(fi.i, fi2.o)]), fg='bright_green')}\""
                                            " for this segment."
                                            "\nOK to continue?"
                                        ):
                                            shown_promt = True
                                            x = f'{x}{"".join(_src[slice(fi.i, fi2.o)])}'
                                            st = idx + 3 + len(fi) + len(fi2) + 2  # +2 for : and ]
                                        else:
                                            raise typer.Abort()
                                    # NOTE(review): when the slice is fully in
                                    # range (the normal case) nothing appends
                                    # the segment or advances st here -- the
                                    # in-range path appears to be missing;
                                    # confirm against upstream source.
                        else:
                            # Literal character: copied through unchanged.
                            x = f'{x}{c}'
                    req_list += [cli.central.BatchRequest(cli.central.update_ap_settings, (ap, x))]
                    name_list += [f" {x}"]
                    break
                except typer.Abort:
                    fstr = _lldp_rename_get_fstr()
                except Exception as e:
                    log.exception(f"LLDP rename exception while parsing {fstr}\n{e}", show=log.DEBUG)
                    typer.secho(f"\nThere Appears to be a problem with {fstr}: {e.__class__.__name__}", fg="red")
                    # NOTE(review): abort="True" passes the *string* "True"
                    # (truthy, so it works) -- likely meant abort=True.
                    if typer.confirm("Do you want to edit the fomat string and try again?", abort="True"):
                        fstr = _lldp_rename_get_fstr()

    # Preview results (abbreviated when long), then rename on confirmation.
    typer.secho(f"Resulting AP names based on '{fstr}':", fg="bright_green")
    if len(name_list) <= 6:
        typer.echo("\n".join(name_list))
    else:
        typer.echo("\n".join(
            [
                *name_list[0:3],
                "...",
                *name_list[-3:]
            ]
        )
        )
    if typer.confirm("Proceed with AP Rename?", abort=True):
        return cli.central.batch_request(req_list)
def get_template_identifier(
    self,
    query_str: str,
    ret_field: str = "name",
    group: str = None,
    retry: bool = True,
    multi_ok: bool = False,
    completion: bool = False,
) -> CentralObject:
    """Allows case insensitive template match by template name"""
    # Completion lookups never trigger a cache refresh.
    if completion:
        retry = False

    found = None
    for _ in range(0, 2 if retry else 1):
        # exact
        found = self.TemplateDB.search((self.Q.name == query_str))

        # case insensitive
        if not found:
            found = self.TemplateDB.search(
                self.Q.name.test(lambda v: v.lower() == query_str.lower()))

        # case insensitive with -/_ swap
        if not found:
            if "_" in query_str:
                found = self.TemplateDB.search(
                    self.Q.name.test(
                        lambda v: v.lower() == query_str.lower().replace("_", "-")))
            elif "-" in query_str:
                found = self.TemplateDB.search(
                    self.Q.name.test(
                        lambda v: v.lower() == query_str.lower().replace("-", "_")))

        # startswith
        if not found:
            found = self.TemplateDB.search(
                self.Q.name.test(lambda v: v.lower().startswith(query_str.lower())))

        if retry and not found and self.central.get_all_templates not in self.updated:
            typer.secho(
                f"No Match Found for {query_str}, Updating template Cache",
                fg="red")
            self.check_fresh(refresh=True, template_db=True)

        if found:
            found = [CentralObject("template", record) for record in found]
            break

    if not found:
        if retry:
            log.error(
                f"Unable to gather template {ret_field} from provided identifier {query_str}",
                show=True)
            raise typer.Exit(1)
        if not completion:
            log.warning(
                f"Unable to gather template {ret_field} from provided identifier {query_str}",
                show=False)
        return

    if completion:
        return found
    # Narrow by group first, then interactively disambiguate if still ambiguous.
    if len(found) > 1 and group:
        found = [tmplt for tmplt in found if tmplt.group.lower() == group.lower()]
    if len(found) > 1:
        found = self.handle_multi_match(
            found,
            query_str=query_str,
            query_type="template",
            multi_ok=multi_ok,
        )
    return found[0]
def get_group_identifier(
    self,
    query_str: str,
    ret_field: str = "name",
    retry: bool = True,
    multi_ok: bool = False,
    completion: bool = False,
) -> CentralObject:
    """Allows Case insensitive group match

    Args:
        query_str: User-provided group identifier.
        ret_field: Field the caller wants (used only in error messages here).
        retry: When True and nothing matches, refresh the group cache once.
        multi_ok: Passed through to handle_multi_match.
        completion: Shell-completion mode; returns all matches, never retries.

    Returns:
        The matched CentralObject (or list of matches when completion=True).

    Raises:
        typer.Exit: When no match is found and retry is True.
    """
    retry = False if completion else retry
    match = None
    for _ in range(0, 2):
        # Exact match
        match = self.GroupDB.search((self.Q.name == query_str))

        # case insensitive
        if not match:
            match = self.GroupDB.search(
                self.Q.name.test(lambda v: v.lower() == query_str.lower()))

        # case insensitive ignore -_
        if not match:
            if "_" in query_str or "-" in query_str:
                match = self.GroupDB.search(
                    self.Q.name.test(lambda v: v.lower().strip("-_") ==
                                     query_str.lower().strip("_-")))

        # case insensitive startswith
        if not match:
            match = self.GroupDB.search(
                self.Q.name.test(
                    lambda v: v.lower().startswith(query_str.lower())))

        # case insensitive startswith ignore - _
        if not match:
            match = self.GroupDB.search(
                self.Q.name.test(lambda v: v.lower().strip("-_").startswith(
                    query_str.lower().strip("-_"))))

        if not match and retry and self.central.get_all_groups not in self.updated:
            typer.secho(
                f"No Match Found for {query_str}, Updating group Cache",
                fg="red")
            self.check_fresh(refresh=True, group_db=True)
            # bugfix/cleanup: removed no-op "_ += 1" -- the for statement
            # rebinds _ on every iteration, so incrementing it had no effect.
            # (Also removed a stale commented-out replace()-based matcher.)

        if match:
            match = [CentralObject("group", g) for g in match]
            break

    if match:
        if completion:
            return match
        if len(match) > 1:
            match = self.handle_multi_match(match,
                                            query_str=query_str,
                                            query_type="group",
                                            multi_ok=multi_ok)
        return match[0]
    elif retry:
        log.error(
            f"Central API CLI Cache unable to gather group data from provided identifier {query_str}",
            show=True)
        valid_groups = "\n".join(self.group_names)
        typer.secho(f"{query_str} appears to be invalid", fg="red")
        typer.secho(f"Valid Groups:\n--\n{valid_groups}\n--\n", fg="cyan")
        raise typer.Exit(1)
    else:
        if not completion:
            log.error(
                f"Central API CLI Cache unable to gather group data from provided identifier {query_str}",
                show=True)
def do_lldp_rename(fstr: str, **kwargs) -> Response:
    """Rename Up APs based on a format string built from AP / LLDP neighbor fields.

    Format specifiers (fstr_to_key): %h neighbor hostname, %m AP mac,
    %p neighbor remote port, %M model, %S site, %s serial.  Modifiers parsed
    below: ((a,b)) char replace, [n:m] slice, [-n] tail, delimiter + index to
    split and take a segment.  kwargs pass through as get_devices filters.
    Warns about API call volume / daily quota before proceeding.
    """
    # lldp neighbor calls are only needed when the format references them.
    need_lldp = False if "%h" not in fstr and "%p" not in fstr else True
    # TODO get all APs then filter down after, stash down aps for easy subsequent call
    resp = cli.central.request(cli.central.get_devices, "aps", status="Up", **kwargs)

    if not resp:
        cli.display_results(resp, exit_on_fail=True)
    elif not resp.output:
        # Successful call but nothing matched the provided filters.
        filters = ", ".join([f"{k}: {v}" for k, v in kwargs.items()])
        resp.output = {
            "description": "API called was successful but returned no results.",
            "error": f"No Up APs found matching provided filters ({filters})."
        }
        resp.ok = False
        cli.display_results(resp, exit_on_fail=True)

    _all_aps = utils.listify(resp.output)
    _keys = ["name", "macaddr", "model", "site", "serial"]
    # ap serial -> subset of AP attributes; macaddr is normalized to key "mac".
    ap_dict = {d["serial"]: {k if k != "macaddr" else "mac": d[k] for k in d if k in _keys} for d in _all_aps}
    fstr_to_key = {
        "h": "neighborHostName",
        "m": "mac",
        "p": "remotePort",
        "M": "model",
        "S": "site",
        "s": "serial"
    }
    req_list, name_list, shown_promt = [], [], False
    if not ap_dict:
        log.error("Something went wrong, no ap_dict provided or empty", show=True)
        raise typer.Exit(1)

    # 2 calls per AP (get settings + update); +1 each for lldp when needed.
    num_calls = len(ap_dict) * 3 if need_lldp else len(ap_dict) * 2

    if len(ap_dict) > 5:
        _warn = "\n\n[blink bright_red blink]WARNING[reset]"
        if need_lldp:
            _warn = f"{_warn} Format provided requires details about the upstream switch.\n"
            _warn = f"{_warn} This automation will result in [cyan]{num_calls}[/] API calls. 3 per AP.\n"
            _warn = f"{_warn} 1 to gather details about the upstream switch\n"
        else:
            _warn = f"{_warn} This automation will result in [cyan]{num_calls}[/] API calls, 1 for each AP.\n"
        _warn = f"{_warn} 1 to get the aps current settings (all settings need to be provided during the update, only the name changes).\n"
        _warn = f"{_warn} 1 to Update the settings / rename the AP.\n"
        _warn = f"{_warn}\n Current daily quota: [bright_green]{resp.rl.remain_day}[/] calls remaining\n"
        print(_warn)
        if resp.rl.remain_day < num_calls:
            print(f" {resp.rl}")
            print(f" More calls required {num_calls} than what's remaining in daily quota {resp.rl.remain_day}.")
        typer.confirm("Proceed:", abort=True)

    if need_lldp:
        ap_dict = _get_lldp_dict(ap_dict)

    # TODO refactor and use a template string or j2 something should already exist for this stuff.
    for ap in ap_dict:
        ap_dict[ap]["mac"] = utils.Mac(ap_dict[ap]["mac"]).clean
        # Re-parse fstr for this AP until it succeeds; on error the user can
        # supply a corrected format string and we loop again.
        while True:
            st = 0  # index of the next character not yet consumed
            x = ''  # accumulated rename result for this AP
            try:
                for idx, c in enumerate(fstr):
                    # Skip characters already consumed by a specifier.
                    if not idx >= st:
                        continue
                    if c == '%':
                        if fstr[idx + 1] not in fstr_to_key.keys():
                            _e1 = typer.style(
                                f"Invalid source specifier ({fstr[idx + 1]}) in format string {fstr}: ",
                                fg="red"
                            )
                            _e2 = "Valid values:\n{}".format(
                                ", ".join(fstr_to_key.keys())
                            )
                            typer.echo(f"{_e1}\n{_e2}")
                            raise KeyError(f"{fstr[idx + 1]} is not valid")
                        # Source value the modifiers below operate on.
                        _src = ap_dict[ap][fstr_to_key[fstr[idx + 1]]]
                        if fstr[idx + 2] != "[":
                            if fstr[idx + 2] == "%" or fstr[idx + 3] == "%":
                                # Bare specifier: take the whole value.
                                x = f'{x}{_src}'
                                st = idx + 2
                            elif fstr[idx + 2:idx + 4] == "((":  # +3 should also be (
                                # ((a,b)): replace character a with b.
                                _from = fstr[idx + 4]
                                _to = fstr[idx + 6]
                                if not fstr[idx + 5] == ",":
                                    typer.secho(
                                        f"expected a comma at character {idx + 1 + 5} but found {fstr[idx + 5]}\n"
                                        "will try to proceed.", fg="bright_red"
                                    )
                                if not fstr[idx + 7:idx + 9] == "))":
                                    typer.secho(
                                        f"expected a )) at characters {idx + 1 + 7}-{idx + 1 + 8} "
                                        f"but found {fstr[idx + 7]}{fstr[idx + 8]}\n"
                                        "will try to proceed.", fg="bright_red"
                                    )
                                x = f'{x}{_src.replace(_from, _to)}'
                                st = idx + 9
                            else:
                                # Delimiter + index: split on fstr[idx+2], take segment.
                                try:
                                    fi = _get_full_int(fstr[idx + 3:])
                                    x = f'{x}{_src.split(fstr[idx + 2])[fi.i]}'
                                    st = idx + 3 + len(fi)
                                except IndexError:
                                    _e1 = ", ".join(_src.split(fstr[idx + 2]))
                                    _e2 = len(_src.split(fstr[idx + 2]))
                                    typer.secho(
                                        f"\nCan't use segment {fi.o} of '{_e1}'\n"
                                        f" It only has {_e2} segments.\n",
                                        fg="red"
                                    )
                                    raise
                        else:  # +2 is '['
                            if fstr[idx + 3] == "-":
                                # [-n]: last n characters of the value.
                                try:
                                    fi = _get_full_int(fstr[idx + 4:])
                                    x = f'{x}{"".join(_src[-fi.o:])}'
                                    st = idx + 4 + len(fi) + 1  # +1 for closing ']'
                                except IndexError:
                                    typer.secho(
                                        f"Can't extract the final {fi.o} characters from {_src}"
                                        f"It's only {len(_src)} characters."
                                    )
                                    raise
                            else:  # +2 is '[' +3: should be int [1:4]
                                # [n:m]: slice of the value.
                                fi = _get_full_int(fstr[idx + 3:])
                                fi2 = _get_full_int(fstr[idx + 3 + len(fi) + 1:])  # +1 for expected ':'
                                if len(_src[slice(fi.i, fi2.o)]) < fi2.o - fi.i:
                                    # Slice extends past the value: warn and confirm once.
                                    _e1 = typer.style(
                                        f"\n{fstr} wants to take characters "
                                        f"\n{fi.o} through {fi2.o}"
                                        f"\n\"from {_src}\" (slice ends at character {len(_src[slice(fi.i, fi2.o)])}).",
                                        fg="red"
                                    )
                                    if not shown_promt and typer.confirm(
                                        f"{_e1}"
                                        f"\n\nResult will be \""
                                        f"{typer.style(''.join(_src[slice(fi.i, fi2.o)]), fg='bright_green')}\""
                                        " for this segment."
                                        "\nOK to continue?"
                                    ):
                                        shown_promt = True
                                        x = f'{x}{"".join(_src[slice(fi.i, fi2.o)])}'
                                        st = idx + 3 + len(fi) + len(fi2) + 2  # +2 for : and ]
                                    else:
                                        raise typer.Abort()
                                # NOTE(review): when the slice is fully in
                                # range (the normal case) nothing appends the
                                # segment or advances st here -- the in-range
                                # path appears to be missing; confirm against
                                # upstream source.
                    else:
                        # Literal character: copied through unchanged.
                        x = f'{x}{c}'
                req_list += [cli.central.BatchRequest(cli.central.update_ap_settings, (ap, x))]
                name_list += [f" {x}"]
                break
            except typer.Abort:
                fstr = _lldp_rename_get_fstr()
            except Exception as e:
                log.exception(f"LLDP rename exception while parsing {fstr}\n{e}", show=log.DEBUG)
                print(f"\nThere Appears to be a problem with [red]{fstr}[/]: {e.__class__.__name__}")
                if typer.confirm("Do you want to edit the fomat string and try again?", abort=True):
                    fstr = _lldp_rename_get_fstr()

    # Preview results (abbreviated when long), then rename on confirmation.
    print(f"[bright_green]Resulting AP names based on '{fstr}':")
    if len(name_list) <= 6:
        typer.echo("\n".join(name_list))
    else:
        typer.echo("\n".join(
            [
                *name_list[0:3],
                " ...",
                *name_list[-3:]
            ]
        )
        )
    if typer.confirm("Proceed with AP Rename?", abort=True):
        return cli.central.batch_request(req_list)
async def exec_api_call(self, url: str, data: dict = None, json_data: Union[dict, list] = None,
                        method: str = "GET", headers: dict = {}, params: dict = {},
                        **kwargs) -> Response:
    """Execute a single API request with spinner feedback and one token-refresh retry.

    Performs the aiohttp request, parses the body (json preferred, text
    fallback), and wraps everything in a Response.  On failure with an
    "invalid_token" payload the access token is refreshed and the call is
    retried once; any other failure breaks out immediately.

    NOTE(review): headers/params use mutable default args ({}); headers is
    rebound locally before use so this is likely benign -- verify params.
    """
    auth = self.auth
    resp, spin = None, None
    # Short suffix identifying the endpoint for spinner/log text.
    _data_msg = ' ' if not url else f' [{url.split("arubanetworks.com/")[-1]}]'
    spin_txt_run = "Collecting Data..."
    spin_txt_fail = f"Collecting Data{_data_msg}"
    for _ in range(0, 2):
        if _ > 0:
            spin_txt_run += f" retry {_}"

        log.debug(
            f"Attempt API Call to:{_data_msg}Try: {_ + 1}\n"
            f" access token: {auth.central_info.get('token', {}).get('access_token', {})}\n"
            f" refresh token: {auth.central_info.get('token', {}).get('refresh_token', {})}"
        )

        try:
            with Halo(spin_txt_run, enabled=bool(utils.tty)) as spin:
                _start = time.time()
                # Caller headers are merged over the session defaults.
                headers = self.headers if not headers else {
                    **self.headers,
                    **headers
                }
                # -- // THE API REQUEST \\ --
                resp = await self.aio_session.request(method=method,
                                                      url=url,
                                                      params=params,
                                                      data=data,
                                                      json=json_data,
                                                      headers=headers,
                                                      ssl=self.ssl,
                                                      **kwargs)
                elapsed = time.time() - _start

                try:
                    output = await resp.json()
                    # Preserve the unstripped payload; non-dict payloads have no copy().
                    try:
                        raw_output = output.copy()
                    except AttributeError:
                        raw_output = output
                    output = cleaner.strip_outer_keys(output)
                except (json.decoder.JSONDecodeError, ContentTypeError):
                    # Non-json body: fall back to plain text for both fields.
                    output = raw_output = await resp.text()

                resp = Response(resp, output=output, raw=raw_output, elapsed=elapsed)
        except Exception as e:
            # Transport-level failure: wrap as an error Response.
            # NOTE(review): "_ += 1" has no effect -- the for statement
            # rebinds _ each iteration.
            resp = Response(error=str(e), url=url)
            _ += 1

        fail_msg = spin_txt_fail if self.silent else f"{spin_txt_fail}\n {resp.output}"
        if not resp:
            spin.fail(fail_msg)
            if "invalid_token" in resp.output:
                # Expired token: refresh and let the loop retry once.
                self.refresh_token()
            else:
                log.error(
                    f"API [{method}] {url} Error Returned: {resp.error}")
                break
        else:
            # spin.succeed()
            spin.stop()
            break

    return resp
def display_results(
    self,
    resp: Union[Response, List[Response]] = None,
    data: Union[List[dict], List[str], dict, None] = None,
    tablefmt: TableFormat = "rich",
    title: str = None,
    caption: str = None,
    pager: bool = False,
    outfile: Path = None,
    sort_by: str = None,
    reverse: bool = False,
    stash: bool = True,
    pad: int = None,
    exit_on_fail: bool = False,
    ok_status: Union[int, List[int], Dict[int, str]] = None,
    set_width_cols: dict = None,
    full_cols: Union[List[str], str] = [],
    fold_cols: Union[List[str], str] = [],
    cleaner: callable = None,
    **cleaner_kwargs,
) -> None:
    """Output Formatted API Response to display and optionally to file

    one of resp or data attribute is required

    Args:
        resp (Union[Response, List[Response], None], optional): API Response objects.
        data (Union[List[dict], List[str], None], optional): API Response output data.
        tablefmt (str, optional): Format of output. Defaults to "rich" (tabular).
            Valid Values: "json", "yaml", "csv", "rich", "simple", "tabulate", "raw", "action"
            Where "raw" is unformatted raw response and "action" is formatted for
            POST|PATCH etc. where the result is a simple success/error.
        title: (str, optional): Title of output table. Only applies to "rich" tablefmt.
            Defaults to None.
        caption: (str, optional): Caption displayed at bottome of table.
            Only applies to "rich" tablefmt. Defaults to None.
        pager (bool, optional): Page Output / or not. Defaults to True.
        outfile (Path, optional): path/file of output file. Defaults to None.
        sort_by (Union[str, List[str], None] optional): column or columns to sort output on.
        reverse (bool, optional): reverse the output.
        stash (bool, optional): stash (cache) the output of the command.
            The CLI can re-display with show last. Default: True
        ok_status (Union[int, List[int], Tuple[int, str], List[Tuple[int, str]]], optional):
            By default responses with status_code 2xx are considered OK and are rendered
            as green by Output class. provide int or list of int to override additional
            status_codes that should also be rendered as success/green. provide a dict
            with {int: str, ...} where string can be any color supported by Output class
            or "neutral" "success" "fail" where neutral is no formatting, and
            success / fail will use the default green / red respectively.
        set_width_cols (Dict[str: Dict[str, int]]): Passed to output function defines
            cols with min/max width
            example: {'details': {'min': 10, 'max': 30}, 'device': {'min': 5, 'max': 15}}
        full_cols (list): columns to ensure are displayed at full length
            (no wrap no truncate)
        cleaner (callable, optional): The Cleaner function to use.
    """
    # TODO remove ok_status, and handle in CentralAPI method (set resp.ok = True)
    if pad:
        log.error("Deprecated pad parameter referenced in display_results", show=True)

    if resp is not None:
        resp = utils.listify(resp)

        # update caption with rate limit
        # NOTE(review): rl_str is only bound when resp[-1].rl is truthy, but it
        # is referenced unconditionally in the raw/action branch below --
        # potential NameError; confirm rl is always truthy in practice.
        if resp[-1].rl:
            rl_str = f"[italic dark_olive_green2]{resp[-1].rl}[/]".lstrip()
            caption = f"{caption}\n {rl_str}" if caption else f" {rl_str}"

        for idx, r in enumerate(resp):
            # Multi request url line
            # NOTE(review): key "PATH" looks like a typo for "PATCH" -- the
            # lookup below would then fall back to "reset"; confirm.
            m_colors = {
                "GET": "bright_green",
                "DELETE": "red",
                "PATH": "dark_orange3",
                "PUT": "dark_orange3",
                "POST": "dark_orange3"
            }
            fg = "bright_green" if r else "red"
            # Show a per-request header when output is multi-response,
            # raw/action formatted, or an OK response with no body.
            conditions = [
                len(resp) > 1, tablefmt in ["action", "raw"], r.ok and not r.output
            ]
            if any(conditions):
                _url = r.url if not hasattr(r.url, "raw_path_qs") else r.url.path
                m_color = m_colors.get(r.method, "reset")
                print(f"Request {idx + 1} [[{m_color}]{r.method}[reset]: "
                      f"[cyan]{_url}[/cyan]]\n [fg]Response[reset]:")

            # Global raw-output override (e.g. --raw CLI flag).
            if self.raw_out:
                tablefmt = "raw"

            # Empty body: synthesize a short status blurb as the output.
            if not r.output:
                c = Console(record=True)
                c.begin_capture()
                c.print(f" Status Code: [{fg}]{r.status}[/]")
                c.print(
                    f" :warning: Empty Response. This may be normal.")
                r.output = c.end_capture()

            if not r or tablefmt in ["action", "raw"]:
                if tablefmt == "raw":
                    # dots = f"[{fg}]{'.' * 16}[/{fg}]"
                    status_code = f"[{fg}]status code: {r.status}[/{fg}]"
                    print(r.url)
                    print(status_code)
                    if not r.ok:
                        print(r.error)
                    # print(f"{dots}\n{status_code}\n{dots}")
                    print(
                        "[bold cyan]Unformatted response from Aruba Central API GW[/bold cyan]"
                    )
                    print(r.raw)

                    if outfile:
                        self.write_file(outfile, r.raw)
                else:
                    print(f"[{fg}]{r}")

                # Rate-limit footer after the final response.
                if idx + 1 == len(resp):
                    console.print(f"\n{rl_str}")
            else:
                self._display_results(r.output,
                                      tablefmt=tablefmt,
                                      title=title,
                                      caption=caption,
                                      pager=pager,
                                      outfile=outfile,
                                      sort_by=sort_by,
                                      reverse=reverse,
                                      stash=stash,
                                      pad=pad,
                                      set_width_cols=set_width_cols,
                                      full_cols=full_cols,
                                      fold_cols=fold_cols,
                                      cleaner=cleaner,
                                      **cleaner_kwargs)

        # TODO make elegant caas send-cmds uses this logic
        if cleaner and cleaner.__name__ == "parse_caas_response":
            print(caption)

        if exit_on_fail and not all([r.ok for r in resp]):
            raise typer.Exit(1)
    elif data:
        self._display_results(data,
                              tablefmt=tablefmt,
                              title=title,
                              caption=caption,
                              pager=pager,
                              outfile=outfile,
                              sort_by=sort_by,
                              reverse=reverse,
                              stash=stash,
                              pad=pad,
                              set_width_cols=set_width_cols,
                              full_cols=full_cols,
                              fold_cols=fold_cols,
                              cleaner=cleaner,
                              **cleaner_kwargs)