def import_vlan(
    key: str = typer.Argument(..., help="The Key from stored_tasks with vlan details to import"),
    import_file: str = typer.Argument(None, exists=True),
    file: Path = typer.Option(None, exists=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
        callback=cli.account_name_callback,
    ),
) -> None:
    """Add VLAN from stored_tasks file.

    Same as `cencli batch add-vlan <key>`, but the add_vlan command is implied;
    only the key needs to be provided.
    """
    import_file = file or import_file or config.stored_tasks_file
    if import_file == config.stored_tasks_file and not key:
        typer.echo("key is required when using the default import file")
        raise typer.Exit(1)

    data = config.get_file_data(import_file)
    if key:
        if hasattr(data, "dict"):  # csv
            data = data.dict  # TODO not tested in csv form
            data = {k: data[k] for k in data if data.get("key", "") == key}
        else:
            data = data.get(key)

    if data:
        args = data.get("arguments", [])
        kwargs = data.get("options", {})
        _msg = (
            f"\n{typer.style('add-vlan', fg='bright_green')}"
            f'\n{typer.style("  settings:", fg="cyan")}'
            f"\n    args: {', '.join(args)}\n    kwargs: {', '.join([f'{k}={v}' for k, v in kwargs.items()])}"
        )
        typer.echo(f"{_msg}")
        confirm_msg = typer.style("Proceed?", fg="bright_green")
        if typer.confirm(confirm_msg):
            add_vlan(*args, **kwargs)
        else:
            raise typer.Abort()
    else:
        typer.secho(f"{key} Not found in {import_file}")
        raise typer.Exit(1)
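# Hedged illustration (not from the source): a stored_tasks entry shaped the way the parsing
# above expects it -- keyed by <key>, with an "arguments" list and an "options" dict.
# The key name, group, and VLAN values below are hypothetical.
#
# add-vlan-99:
#   arguments:
#     - branch1-gw          # group / device the VLAN is added to
#   options:
#     pvid: 99
#     name: guest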
def add(
    what: BatchArgs = typer.Argument(...),
    import_file: Path = typer.Argument(..., exists=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
        callback=cli.account_name_callback,
    ),
) -> None:
    """Perform batch Add operations using import data from file."""
    central = cli.central
    data = config.get_file_data(import_file)

    resp = None
    if what == "sites":
        if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
            # TODO Exception handler
            if "address" in str(data.headers) and len(data.headers) > 3:  # address info
                data = [
                    {
                        "site_name": i.get("site_name", i.get("site", "ERROR")),
                        "site_address": {k: v for k, v in i.items() if k not in ["site", "site_name"]},
                    }
                    for i in data.dict
                ]
            else:  # geoloc
                data = [
                    {
                        "site_name": i.get("site_name", i.get("site", "ERROR")),
                        "geolocation": {k: v for k, v in i.items() if k not in ["site", "site_name"]},
                    }
                    for i in data.dict
                ]
        resp = central.request(central.create_site, site_list=data)

    cli.display_results(resp)
def get_arguments_from_import(import_file: str, key: str = None) -> list:
    """Get arguments from default import_file (stored_tasks.yaml)

    Args:
        import_file (str): name of import file
        key (str, optional): return single value for specific key if provided. Defaults to None.

    Returns:
        list: updated sys.argv list.
    """
    # args = utils.read_yaml(import_file)
    args = config.get_file_data(Path(import_file))
    if key and key in args:
        args = args[key]

    sys.argv += args

    return sys.argv
def rename(
    what: BatchArgs = typer.Argument(...),
    import_file: Path = typer.Argument(None, metavar="['lldp'|IMPORT FILE PATH]"),  # TODO completion
    lldp: bool = typer.Option(None, help="Automatic AP rename based on lldp info from upstream switch."),
    ap: str = typer.Option(None, metavar=iden_meta_vars.dev, help="[LLDP rename] Perform on specified AP"),
    label: str = typer.Option(None, help="[LLDP rename] Perform on APs with specified label"),
    group: str = typer.Option(None, help="[LLDP rename] Perform on APs in specified group"),
    site: str = typer.Option(None, metavar=iden_meta_vars.site, help="[LLDP rename] Perform on APs in specified site"),
    model: str = typer.Option(None, help="[LLDP rename] Perform on APs of specified model"),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", show_default=False),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging"),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
    ),
) -> None:
    """Perform AP rename in batch from import file or automatically based on LLDP"""
    yes = yes_ if yes_ else yes
    if str(import_file).lower() == "lldp":
        lldp = True
        import_file = None

    if not import_file and not lldp:
        typer.echo("Error: Missing required parameter [IMPORT_FILE|'lldp']")
        raise typer.Exit(1)

    central = cli.central
    if import_file:
        if not import_file.exists():
            typer.secho(f"Error: {import_file} not found.", fg="red")
            raise typer.Exit(1)

        data = config.get_file_data(import_file)

        resp = None
        if what == "aps":
            # transform flat csv struct to Dict[str, Dict[str, str]] {"<AP serial>": {"hostname": "<desired_name>"}}
            if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
                if data and len(data.headers) < 3:
                    if "name" in data.headers:
                        data = [{k if k != "name" else "hostname": d[k] for k in d} for d in data.dict]
                        data.headers["hostname"] = data.headers.pop(
                            data.headers.index(data.headers["name"])
                        )
                    data = {
                        i.get("serial", i.get("serial_number", i.get("serial_num", "ERROR"))): {
                            k: v for k, v in i.items() if not k.startswith("serial")
                        }
                        for i in data.dict
                    }

            calls, conf_msg = [], [typer.style("Names Gathered from import:", fg="bright_green")]
            for ap in data:  # serial num
                conf_msg += [f"  {ap}: {data[ap]['hostname']}"]
                calls.append(central.BatchRequest(central.update_ap_settings, (ap,), data[ap]))

            if len(conf_msg) > 6:
                conf_msg = [*conf_msg[0:3], "...", *conf_msg[-3:]]
            typer.echo("\n".join(conf_msg))

            if yes or typer.confirm("Proceed with AP rename?", abort=True):
                resp = central.batch_request(calls)

    elif lldp:
        kwargs = {}
        if group:
            kwargs["group"] = cli.cache.get_group_identifier(group).name
        if ap:
            kwargs["serial"] = cli.cache.get_dev_identifier(ap, dev_type="ap").serial
        if site:
            kwargs["site"] = cli.cache.get_site_identifier(site).name
        if model:
            kwargs["model"] = model
        if label:
            kwargs["label"] = label

        resp = do_lldp_rename(_lldp_rename_get_fstr(), **kwargs)

    cli.display_results(resp, exit_on_fail=True)
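# Hedged illustration (not from the source): an AP rename CSV in the flat shape the transform
# above handles -- a serial column plus a name/hostname column. Serials and names are hypothetical.
#
# serial,hostname
# CN12345678,AP-branch1-idf3-01
# CN87654321,AP-branch1-idf3-02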
def delete(
    what: BatchDelArgs = typer.Argument(...),
    import_file: Path = typer.Argument(..., exists=True, readable=True),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", show_default=False),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging"),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
    ),
) -> None:
    """Perform batch Delete operations using import data from file."""
    yes = yes_ if yes_ else yes
    central = cli.central
    data = config.get_file_data(import_file)
    if hasattr(data, "dict"):  # csv
        data = data.dict

    resp = None
    if what == "sites":
        del_list = []
        _msg_list = []
        for i in data:
            if isinstance(i, str) and isinstance(data[i], dict):
                i = {"site_name": i, **data[i]} if "name" not in i and "site_name" not in i else data[i]

            if "site_id" not in i and "id" not in i:
                if "site_name" in i or "name" in i:
                    _name = i.get("site_name", i.get("name"))
                    _id = cli.cache.get_site_identifier(_name).id
                    found = True
                    _msg_list += [_name]
                    del_list += [_id]
                else:
                    found = False

            for key in ["site_id", "id"]:
                if key in i:
                    del_list += [i[key]]
                    _msg_list += [i.get("site_name", i.get("site", i.get("name", f"id: {i[key]}")))]
                    found = True
                    break

            if not found:
                if i.get("site_name", i.get("site", i.get("name"))):
                    site = cli.cache.get_site_identifier(i.get("site_name", i.get("site", i.get("name"))))
                    _msg_list += [site.name]
                    del_list += [site.id]
                    break
                else:
                    typer.secho("Error getting site ids from import, unable to find required key", fg="red")
                    raise typer.Exit(1)

        if len(_msg_list) > 7:
            _msg_list = [*_msg_list[0:3], "...", *_msg_list[-3:]]
        typer.secho("\nSites to delete:", fg="bright_green")
        typer.echo("\n".join([f"  {m}" for m in _msg_list]))

        if yes or typer.confirm(f"\n{typer.style('Delete', fg='red')} {len(del_list)} sites", abort=True):
            resp = central.request(central.delete_site, del_list)
            if resp:
                cache_del_res = asyncio.run(cli.cache.update_site_db(data=del_list, remove=True))
                if len(cache_del_res) != len(del_list):
                    log.warning(
                        f"Attempt to delete entries from Site Cache returned {len(cache_del_res)} "
                        f"but we tried to delete {len(del_list)} sites.",
                        show=True
                    )

    cli.display_results(resp)
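# Hedged illustration (not from the source): a delete-sites import file in one of the shapes the
# loop above accepts -- entries may carry a site name (resolved via the cache) or a site_id/id
# directly. Names and ids below are hypothetical.
#
# - site_name: branch-101
# - id: 4242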
def add(
    what: BatchArgs = typer.Argument(...),
    import_file: Path = typer.Argument(..., exists=True),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", show_default=False),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging"),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
    ),
) -> None:
    """Perform batch Add operations using import data from file."""
    yes = yes_ if yes_ else yes
    central = cli.central
    data = config.get_file_data(import_file)

    resp = None
    if what == "sites":
        if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
            # TODO Exception handler
            if "address" in str(data.headers) and len(data.headers) > 3:  # address info
                data = [
                    {
                        "site_name": i.get("site_name", i.get("site", i.get("name"))),
                        "site_address": {k: v for k, v in i.items() if k not in ["site", "site_name"]},
                    }
                    for i in data.dict
                ]
            else:  # geoloc
                data = [
                    {
                        "site_name": i.get("site_name", i.get("site", i.get("name"))),
                        "geolocation": {k: v for k, v in i.items() if k not in ["site", "site_name"]},
                    }
                    for i in data.dict
                ]

        site_names = [d.get("site_name", "ERROR") for d in data]
        if len(site_names) > 7:
            site_names = [*site_names[0:3], "...", *site_names[-3:]]

        _msg = [
            typer.style("Batch Add Sites:", fg="cyan"),
            typer.style(
                "\n".join([typer.style(f'  {n}', fg="bright_green" if n != "..." else "reset") for n in site_names])
            ),
            typer.style("Proceed with Site Additions?", fg="cyan"),
        ]
        _msg = "\n".join(_msg)

        if yes or typer.confirm(_msg, abort=True):
            resp = central.request(central.create_site, site_list=data)
            if resp:
                cache_data = [{k.replace("site_", ""): v for k, v in d.items()} for d in resp.output]
                cache_res = asyncio.run(cli.cache.update_site_db(data=cache_data))
                if len(cache_res) != len(data):
                    log.warning(
                        "Attempted to add entries to Site Cache after batch import. Cache Response "
                        f"{len(cache_res)} but we added {len(data)} sites.",
                        show=True
                    )

    cli.display_results(resp)
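# Hedged illustration (not from the source): a sites CSV with address columns, matching the
# "address info" branch above (a site_name/site column plus more than 3 address-related columns).
# Column names and values are hypothetical.
#
# site_name,address,city,state,zipcode,country
# branch-101,123 Main St,San Antonio,TX,78205,US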
def send_cmds(
    kw1: constants.SendCmdArgs = typer.Argument(...),
    nodes: str = typer.Argument(
        None,
        autocompletion=cache.send_cmds_completion,
        metavar=iden.group_or_dev_or_site,
        # callback=cli.send_cmds_node_callback,
        # is_eager=True,
    ),
    kw2: str = typer.Argument(
        None,
        autocompletion=cache.send_cmds_completion,
        # callback=cli.send_cmds_node_callback,
    ),
    commands: List[str] = typer.Argument(None, callback=cli.send_cmds_node_callback),
    cmd_file: Path = typer.Option(None, help="Path to file containing commands (1 per line) to be sent to device", exists=True),
    # dev_file: Path = typer.Option(None, help="Path to file containing iden for devices to send commands to", exists=True),
    # group: bool = typer.Option(None, help="Send commands to all gateways in a group", autocompletion=cli.cache.group_completion),
    # site: bool = typer.Option(None, help="Send commands to all gateways in a site", autocompletion=cli.cache.site_completion),
    all: bool = typer.Option(False, "-A", help="Send command(s) to all gateways (device level update) when group is provided"),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
        callback=cli.account_name_callback,
    ),
) -> None:
    console = Console(emoji=False)
    yes = yes if yes else yes_
    commands = commands or []

    if kw1 == "group":
        if all:
            g = cache.get_group_identifier(nodes)
            nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["group"] == g.name]
            action = f"all devices in {g.name} group."
        else:
            nodes = cache.get_group_identifier(nodes)
            action = f"group level gateway config for {nodes.name} group."
    elif kw1 == "site":
        s = cache.get_site_identifier(nodes)
        nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["site"] == s.name]
        action = f"all devices in site: {s.name}"
    elif kw1 == "file":
        dev_file = Path(nodes)
        file_data = config.get_file_data(dev_file, text_ok=True)
        if not file_data:
            print(f"No data parsed from file {dev_file.name}.")
            raise typer.Exit(1)

        if isinstance(file_data, list):
            nodes = [cache.get_identifier(d.strip(), ["dev", "group", "site"], device_type="gw") for d in file_data]
        else:
            devices = file_data.get("devices", file_data.get("gateways"))
            if devices:
                nodes = [cache.get_identifier(d.strip(), ["dev", "group", "site"], device_type="gw") for d in devices]
            elif "groups" in file_data:
                nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["group"] in file_data["groups"]]
            elif "sites" in file_data:
                nodes = [cache.CentralObject(d) for d in cache.devices if d["type"] == "gw" and d["site"] in file_data["sites"]]
            else:
                print(f"Expected 'gateways', 'groups', or 'sites' key in {dev_file.name}.")
                raise typer.Exit(1)

            if "cmds" in file_data or "commands" in file_data:
                if commands:
                    print("Providing commands on the command line and in the import file is a strange thing to do.")
                    raise typer.Exit(1)
                commands = file_data.get("cmds", file_data.get("commands"))
    elif kw1 == "device":
        if not isinstance(nodes, str):
            print(f"nodes is of type {type(nodes)}; this is unexpected.")
        nodes = [cache.get_identifier(nodes, ["dev"], "gw")]

    if cmd_file:
        if commands:
            print("Providing commands on the command line and in the import file is a strange thing to do.")
            raise typer.Exit(1)
        else:
            commands = [line.rstrip() for line in cmd_file.read_text().splitlines()]

    if not commands:
        print("Error: No commands provided")
        raise typer.Exit(1)

    if yes or typer.confirm("\nProceed?", abort=True):
        caasapi = caas.CaasAPI(central=cli.central)
        _reqs = [
            cli.central.BatchRequest(
                caasapi.send_commands,
                n.name if not n.is_dev else n.mac,
                cli_cmds=commands,
            )
            for n in utils.listify(nodes)
        ]
        batch_res = cli.central.batch_request(_reqs)
        cli.display_results(batch_res, cleaner=cleaner.parse_caas_response)
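# Hedged illustration (not from the source): an import file in the shape the "file" branch above
# parses -- a "gateways" (or "devices") list, or "groups"/"sites" lists, plus optional
# "cmds"/"commands". Identifiers are hypothetical and the command lines are placeholders.
#
# gateways:
#   - 20:4c:03:aa:bb:cc
#   - branch1-gw
# cmds:
#   - <gateway cli command 1>
#   - <gateway cli command 2>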
def caas_batch(
    key: str = typer.Argument(None),
    file: Path = typer.Option(config.stored_tasks_file, exists=True),
    command: str = typer.Option(None),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
        callback=cli.account_name_callback,
    ),
) -> None:
    """Run a stored task from the stored-tasks import file.

    e.g. cencli caas batch add-vlan add-vlan-99
    """
    caasapi = caas.CaasAPI(central=cli.central)
    if file == config.stored_tasks_file and not key:
        print("[bright_red]ERROR:[/] key is required when using the default import file")
        raise typer.Exit(1)

    data = config.get_file_data(file)
    if hasattr(data, "dict"):  # csv
        data = data.dict
        data = {k: data[k] for k in data if data.get("key", "") == key}
    else:
        data = data.get(key)

    if not data:
        print(f"[bright_red]ERROR:[/] [cyan]{key}[/] not found in [cyan]{file}[/].  No Data to Process")
        raise typer.Exit(1)
    else:
        args = data.get("arguments", [])
        kwargs = data.get("options", {})
        cmds = data.get("cmds", [])

        if not args:
            print("[bright_red]ERROR:[/] import data requires an argument specifying the group / device")
            raise typer.Exit(1)

        if command:
            command = command.replace('-', '_')
            _msg1 = typer.style(f"Proceed with {command}:", fg="cyan")
            _msg2 = f"{', '.join(args)} {', '.join([f'{k}={v}' for k, v in kwargs.items()])}"
            confirm_msg = typer.style(f"{_msg1} {_msg2}?", fg="bright_green")

            if command in globals():
                fn = globals()[command]
                if typer.confirm(confirm_msg):
                    fn(*args, **kwargs)  # type: ignore  # NoQA
                else:
                    raise typer.Abort()
            else:
                typer.echo(f"{command} doesn't appear to be valid")

        elif cmds:
            print(f"\nSending the following to [cyan]{utils.unlistify(args)}[/]")
            if kwargs:
                print("\n With the following options:")
                _ = [print(f"    {k} : {v}") for k, v in kwargs.items()]
            print(f" [bold]cli cmds:[/]")
            _ = [print(f"    [cyan]{c}[/]") for c in cmds]

            if typer.confirm("Proceed:"):
                kwargs = {**kwargs, **{"cli_cmds": cmds}}
                resp = cli.central.request(caasapi.send_commands, *args, **kwargs)
                caas.eval_caas_response(resp)
def batch_add_groups(import_file: Path, yes: bool = False) -> List[Response]:
    console = Console(emoji=False)
    br = cli.central.BatchRequest
    data = config.get_file_data(import_file)  # TODO handle csv
    if isinstance(data, dict) and "groups" in data:
        data = data["groups"]

    reqs, gw_reqs, ap_reqs = [], [], []
    pre_cfgs = []
    _pre_config_msg = ""
    cache_data = []
    for group in data:
        if "allowed-types" in data[group]:
            data[group]["allowed_types"] = data[group]["allowed-types"]
            del data[group]["allowed-types"]

        try:
            g = GroupImport(**{"group": group, **data[group]})
        except ValidationError as e:
            print(e)
            raise typer.Exit(1)

        reqs += [
            br(
                cli.central.create_group,
                g.group,
                allowed_types=g.allowed_types,
                wired_tg=g.wired_tg,
                wlan_tg=g.wlan_tg,
                aos10=g.aos10,
                microbranch=g.microbranch,
                gw_role=g.gw_role,
                monitor_only_sw=g.monitor_only_sw,
                monitor_only_cx=g.monitor_only_cx,
            )
        ]
        cache_data += [
            {"name": g.group, "template group": {"Wired": g.wired_tg, "Wireless": g.wlan_tg}}
        ]
        for dev_type, cfg_file, var_file in zip(["gw", "ap"], [g.gw_config, g.ap_config], [g.gw_vars, g.ap_vars]):
            if cfg_file is not None:
                pc = _build_pre_config(g.group, dev_type=dev_type, cfg_file=cfg_file, var_file=var_file)
                pre_cfgs += [pc]
                _pre_config_msg += (
                    f"  [bright_green]{len(pre_cfgs)}[/]. [cyan]{g.group}[/] {'gateways' if dev_type == 'gw' else 'AP'} "
                    f"group level will be configured based on [cyan]{cfg_file.name}[/]\n"
                )
                if dev_type == "gw":
                    gw_reqs += [pc.request]
                else:
                    ap_reqs += [pc.request]

    print(f"[bright_green]The following groups will be created:[/]")
    _ = [print(f"  [cyan]{g}[/]") for g in data]

    _pre_config_msg = (
        "\n[bright_green]Group level configurations will be sent:[/]\n"
        f"{_pre_config_msg}"
        f"\n[italic dark_olive_green2]{len(reqs) + len(gw_reqs) + len(ap_reqs)} API calls will be performed.[/]\n"
    )
    print(_pre_config_msg)

    for idx in range(len(pre_cfgs) + 1):
        if idx > 0:
            print(_pre_config_msg)
        print(f"Select [bright_green]#[/] to display config to be sent or [bright_green]go[/] to continue.")
        ch = utils.ask(
            ">",
            console=console,
            choices=[*[str(idx) for idx in range(1, len(pre_cfgs) + 1)], "abort", "go"],
        )
        if ch.lower() == "go":
            yes = True
            break
        else:
            pc: PreConfig = pre_cfgs[int(ch) - 1]
            console.rule(f"Config to be sent to {pc.name}")
            with console.pager():
                console.print(pc.config)
            console.rule(f" End {pc.name} config ")

    if reqs and yes or typer.confirm("Proceed?", abort=True):
        resp = cli.central.batch_request(reqs)
        if all(r.ok for r in resp):
            cache_resp = asyncio.run(cli.cache.update_group_db(cache_data))
            log.debug(f"batch add group cache resp: {cache_resp}")
        cli.display_results(resp)
        if gw_reqs:
            print("\n[bright_green]Results from Group level gateway config push (CLI commands)[/]")
            print("\n  [italic]This can take some time.[/]")
            resp = cli.central.batch_request(gw_reqs)
            cli.display_results(resp, cleaner=cleaner.parse_caas_response)
        if ap_reqs:
            print("\n[bright_green]Results from Group level AP config push (Replaces entire group level)[/]\n")
            resp = cli.central.batch_request(ap_reqs)
            cli.display_results(resp, tablefmt="action")
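# Hedged illustration (not from the source): a groups import keyed under "groups", using fields
# GroupImport validates above (note "allowed-types" is normalized to allowed_types before
# validation). The group name, types, role, and file names below are hypothetical.
#
# groups:
#   branch-groupA:
#     allowed-types: ["ap", "gw"]
#     aos10: true
#     gw_role: branch
#     gw_config: gw_branch.cfg
#     gw_vars: gw_branch_vars.yaml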
def batch_add_sites(import_file: Path, yes: bool = False) -> Response:
    central = cli.central
    name_aliases = ["site-name", "site", "name"]
    _site_aliases = {
        "site-name": "site_name",
        "site": "site_name",
        "name": "site_name",
        "latitude": "lat",
        "longitude": "lon",
        "zipcode": "zip",
    }

    def convert_site_key(_data: dict) -> dict:
        _data = {
            **_data.get("site_address", {}),
            **_data.get("geolocation", {}),
            **{k: v for k, v in _data.items() if k not in ["site_address", "geolocation"]}
        }
        _data = {_site_aliases.get(k, k): v for k, v in _data.items()}
        return _data

    data = config.get_file_data(import_file)
    if "sites" in data:
        data = data["sites"]

    resp = None
    verified_sites: List[SiteImport] = []
    # TODO test with csv ... NOT YET TESTED
    if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
        verified_sites = [SiteImport(**convert_site_key(i)) for i in data.dict]
    else:
        # We allow a list of flat dicts or a list of dicts where loc info is under
        # "site_address" or "geo_location"
        # can be keyed on name or flat.
        for i in data:
            if isinstance(i, str) and isinstance(data[i], dict):
                out_dict = convert_site_key({"site_name": i, **data[i]})
            else:
                out_dict = convert_site_key(i)

            verified_sites += [SiteImport(**out_dict)]

    site_names = [f"  [cyan]{s.site_name}[/]" for s in verified_sites]
    if len(site_names) > 7:
        site_names = [*site_names[0:3], "  ...", *site_names[-3:]]

    print("[bright_green]The Following Sites will be created:[/]")
    _ = [print(s) for s in site_names]

    if yes or typer.confirm("Proceed?", abort=True):
        reqs = [
            BatchRequest(central.create_site, **site.dict())
            for site in verified_sites
        ]
        resp = central.batch_request(reqs)
        if all([r.ok for r in resp]):
            resp[-1].output = [r.output for r in resp]
            resp = resp[-1]
            cache_res = asyncio.run(cli.cache.update_site_db(data=resp.output))
            if len(cache_res) != len(data):
                log.warning(
                    "Attempted to add entries to Site Cache after batch import. Cache Response "
                    f"{len(cache_res)} but we added {len(data)} sites.",
                    show=True
                )

    return resp
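# Hedged illustration (not from the source): a sites import in one of the shapes handled above --
# keyed on site name, with either address fields or a geolocation sub-dict; convert_site_key
# flattens the sub-dicts and maps aliases (site/name -> site_name, latitude/longitude -> lat/lon,
# zipcode -> zip). All field names and values below are hypothetical.
#
# sites:
#   branch-101:
#     address: 123 Main St
#     city: San Antonio
#     state: TX
#     zipcode: "78205"
#     country: US
#   branch-102:
#     geolocation:
#       latitude: 29.4246
#       longitude: -98.4951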
def rename(
    what: BatchArgs = typer.Argument(...),
    import_file: Path = typer.Argument(None, exists=True),
    lldp: bool = typer.Option(None, help="Automatic AP rename based on lldp info from upstream switch."),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging", callback=cli.debug_callback),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
        callback=cli.account_name_callback,
    ),
) -> None:
    """Perform AP rename in batch from import file or automatically based on LLDP"""
    central = cli.central
    resp = None
    if import_file:
        data = config.get_file_data(import_file)

        if what == "aps":
            if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
                if data and len(data.headers) < 3:
                    if "name" in data.headers:
                        data = [{k if k != "name" else "hostname": d[k] for k in d} for d in data.dict]
                        data.headers["hostname"] = data.headers.pop(
                            data.headers.index(data.headers["name"])
                        )
                    data = {
                        i.get("serial", i.get("serial_number", i.get("serial_num", "ERROR"))): {
                            k: v for k, v in i.items() if not k.startswith("serial")
                        }
                        for i in data.dict
                    }

            calls = []
            for ap in data:
                calls.append(central.BatchRequest(central.update_ap_settings, (ap,), data[ap]))

            resp = central.batch_request(calls)

    elif lldp:
        rtxt = typer.style("RESULT: ", fg=typer.colors.BRIGHT_BLUE)
        typer.secho("Rename APs based on LLDP:", fg="bright_green")
        typer.echo(
            "This function will automatically rename APs based on a combination of\n"
            "information from the upstream switch (via LLDP) and from the AP itself.\n\n"
            "Please provide a format string based on these examples:\n"
            "  For the examples: hostname 'SNANTX-IDF3-sw1', AP on port 7\n"
            "                    AP mac aa:bb:cc:dd:ee:ff\n"
            f"{typer.style('Format String Examples:', fg='cyan')}\n"
            "  Upstream switch's hostname:\n"
            "    '%h[1:4]%' will use the first 4 characters of the switch's hostname.\n"
            f"        {rtxt} 'SNAN'\n"
            "    '%H-1%' will split the hostname into parts separating on '-' and use\n"
            f"        the first segment.  {rtxt} 'SNANTX'\n"
            f"    '%p%' represents the interface.  {rtxt} '7'\n"
            "        note: an interface in the form 1/1/12 is converted to 1_1_12\n"
            "    '%p/3%' separates the port string on / and uses the 3rd segment.\n"
            "    '%m%' or '%m[-4]' = last 4 digits of the AP MAC\n"
            "    '%m:1%' would split on : and take the 1st segment.\n"
        )
        fstr = typer.prompt("Enter Desired format string:")
        do_lldp_rename(fstr)
    else:
        typer.secho("import file Argument is required if --lldp flag not provided", fg="red")
        raise typer.Exit(1)

    cli.display_results(resp)