Code example #1
    def get_log_identifier(self, query: str) -> str:
        if "audit_trail" in query:
            return query
        elif query == "":  # tab completion
            return [x["id"] for x in self.logs]

        try:
            match = self.LogDB.search(self.Q.id == int(query))
            if not match:
                log.warning(
                    f"Unable to gather log id from short index query {query}",
                    show=True)
                typer.echo(
                    "Short log_id aliases are built each time 'show logs' is run."
                )
                typer.echo(
                    "  You can verify the cache by running (hidden command) 'show cache logs'"
                )
                typer.echo(
                    "  run 'show logs [OPTIONS]' then use the short index for details"
                )
                raise typer.Exit(1)
            else:
                return match[-1]["long_id"]

        except ValueError as e:
            log.exception(
                f"Exception in get_log_identifier {e.__class__.__name__}\n{e}")
            typer.secho(
                f"Exception in get_log_identifier {e.__class__.__name__}",
                fg="red")
            raise typer.Exit(1)
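
A minimal usage sketch (hypothetical values; cache stands in for an instance of the class the method above belongs to): a numeric short index resolves to the cached long_id, while an empty query returns every cached id for tab completion.

# Hypothetical usage of get_log_identifier; the "3" short index is illustrative
long_id = cache.get_log_identifier("3")   # short index as shown by 'show logs'
all_ids = cache.get_log_identifier("")    # empty query returns all cached ids (tab completion)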
Code example #2
    def get_template_identifier(
        self,
        query_str: str,
        ret_field: str = "name",
        group: str = None,
        retry: bool = True,
        multi_ok: bool = False,
    ) -> CentralObject:
        """Allows case insensitive template match by template name"""
        match = None
        for _ in range(0, 2 if retry else 1):
            match = self.TemplateDB.search(
                (self.Q.name == query_str)
                | self.Q.name.test(lambda v: v.lower() == query_str.lower()))

            if not match:
                match = self.TemplateDB.search(
                    self.Q.name.test(
                        lambda v: v.lower() == query_str.lower().replace("_", "-")))

            if not match:
                match = self.TemplateDB.search(
                    self.Q.name.test(
                        lambda v: v.lower().startswith(query_str.lower())))

            if retry and not match and self.central.get_all_templates not in self.updated:
                typer.secho(
                    f"No Match Found for {query_str}, Updating template Cachce",
                    fg="red")
                self.check_fresh(refresh=True, template_db=True)
            if match:
                break

        if match:
            if len(match) > 1:
                if group:
                    match = [{**d} for d in match
                             if d["group"].lower() == group.lower()]

            if len(match) > 1:
                match = self.handle_multi_match(
                    match,
                    query_str=query_str,
                    query_type="template",
                    multi_ok=multi_ok,
                )

            return CentralObject("template", match)

        elif retry:
            log.error(
                f"Unable to gather template {ret_field} from provided identifier {query_str}",
                show=True)
            raise typer.Abort()
        else:
            log.warning(
                f"Unable to gather template {ret_field} from provided identifier {query_str}",
                show=False)
Code example #3
File: clidel.py Project: veeruds/central-api-cli
def site(
    sites: List[str] = typer.Argument(
        ...,
        help="Site(s) to delete (can provide more than one).",
        autocompletion=cli.cache.site_completion,
    ),
    yes: bool = typer.Option(False,
                             "-Y",
                             help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    debug: bool = typer.Option(
        False,
        "--debug",
        envvar="ARUBACLI_DEBUG",
        help="Enable Additional Debug Logging",
    ),
    default: bool = typer.Option(
        False,
        "-d",
        is_flag=True,
        help="Use default central account",
        show_default=False,
    ),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
    ),
) -> None:
    yes = yes_ if yes_ else yes
    sites = [cli.cache.get_site_identifier(s) for s in sites]

    _del_msg = [f"  {typer.style(s.name, fg='reset')}" for s in sites]
    if len(_del_msg) > 7:
        _del_msg = [*_del_msg[0:3], "  ...", *_del_msg[-3:]]
    _del_msg = "\n".join(_del_msg)
    confirm_1 = typer.style("About to", fg="cyan")
    confirm_2 = typer.style("Delete:", fg="bright_red")
    confirm_3 = f'{typer.style(f"Confirm", fg="cyan")} {typer.style(f"delete", fg="red")}'
    confirm_3 = f'{confirm_3} {typer.style(f"{len(sites)} sites?", fg="cyan")}'
    _msg = f"{confirm_1} {confirm_2}\n{_del_msg}\n{confirm_3}"

    if yes or typer.confirm(_msg, abort=True):
        del_list = [s.id for s in sites]
        resp = cli.central.request(cli.central.delete_site, del_list)
        cli.display_results(resp)
        if resp:
            cache_del_res = asyncio.run(
                cli.cache.update_site_db(data=del_list, remove=True))
            if len(cache_del_res) != len(del_list):
                log.warning(
                    f"Attempt to delete entries from Site Cache returned {len(cache_del_res)} "
                    f"but we tried to delete {len(del_list)}",
                    show=True)
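
The list-truncation pattern in the confirmation message above (show the first three and last three entries once the list exceeds seven) recurs in several of these commands; a stand-alone sketch of it, with a hypothetical helper name:

# Hypothetical helper capturing the truncation pattern used above
def truncate_msg_list(items: list, max_len: int = 7) -> list:
    """Keep the first and last three entries, eliding the middle, once the list exceeds max_len."""
    if len(items) > max_len:
        return [*items[:3], "...", *items[-3:]]
    return items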
Code example #4
    def write_file(outfile: Path, outdata: str) -> None:
        """Output data to file.

        Args:
            outfile (Path): The file to write to.
            outdata (str): The text to write (dicts/lists are serialized to JSON).
        """
        if outfile and outdata:
            if config.cwd != config.outdir:
                if (outfile.parent.resolve().name == "central-api-cli"
                        and (outfile.parent.resolve() / ".git").is_dir()):
                    # outdir = Path.home() / 'cencli-out'
                    print(
                        "\n[bright_green]You appear to be in the development git dir.\n"
                        f"Exporting to[/] [cyan]{config.outdir.relative_to(config.cwd)}[/] directory."
                    )
                    config.outdir.mkdir(exist_ok=True)
                    outfile = config.outdir / outfile

            print(f"\n[cyan]Writing output to {outfile}... ", end="")

            out_msg = None
            try:
                if isinstance(outdata, (dict, list)):
                    outdata = json.dumps(outdata, indent=4)
                outfile.write_text(
                    outdata)  # typer.unstyle(outdata) also works
            except Exception as e:
                outfile.write_text(f"{outdata}")
                out_msg = f"Error ({e.__class__.__name__}) occurred during attempt to output to file.  " \
                    "Used simple string conversion"

            print("[italic green]Done")
            if out_msg:
                log.warning(out_msg, show=True)
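
A hypothetical call, assuming write_file is reachable on the cli object used in the other examples (it is invoked as self.write_file in the _display_results example below); per the try block above, a dict or list payload is serialized to indented JSON before writing.

# Hypothetical usage; the file names, site_data, and rendered_text are illustrative
cli.write_file(Path("sites.json"), site_data)   # dict/list payload -> indented JSON
cli.write_file(Path("out.txt"), rendered_text)  # plain text is written as-is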
Code example #5
File: clicommon.py Project: veeruds/central-api-cli
    def display_results(
        self,
        resp: Union[Response, List[Response]] = None,
        data: Union[List[dict], List[str], None] = None,
        tablefmt: str = "rich",
        title: str = None,
        caption: str = None,
        pager: bool = True,
        outfile: Path = None,
        sort_by: str = None,
        reverse: bool = None,
        pad: int = None,
        exit_on_fail: bool = False,
        ok_status: Union[int, List[int], Dict[int, str]] = None,
        cleaner: callable = None,
        **cleaner_kwargs,
    ) -> None:
        """Output Formatted API Response to display and optionally to file

        One of the resp or data arguments is required.

        Args:
            resp (Union[Response, List[Response], None], optional): API Response objects.
            data (Union[List[dict], List[str], None], optional): API Response output data.
            tablefmt (str, optional): Format of output. Defaults to "rich" (tabular).
            title (str, optional): Title of output table.
                Only applies to "rich" tablefmt. Defaults to None.
            caption (str, optional): Caption displayed at bottom of table.
                Only applies to "rich" tablefmt. Defaults to None.
            pager (bool, optional): Page Output / or not. Defaults to True.
            outfile (Path, optional): path/file of output file. Defaults to None.
            sort_by (Union[str, List[str], None], optional): Column or columns to sort output on.
            reverse (bool, optional): Reverse the output.
            ok_status (Union[int, List[int], Dict[int, str]], optional): By default, responses with
                a 2xx status_code are considered OK and are rendered green by the Output class.
                Provide an int or list of ints to mark additional status_codes that should also be
                rendered as success/green.  Provide a dict of {int: str, ...} where the string can
                be any color supported by the Output class, or "neutral", "success", or "fail";
                neutral applies no formatting, and success/fail use the default green/red respectively.
            cleaner (callable, optional): The Cleaner function to use.
        """
        # TODO remove ok_status, and handle in CentralAPI method (set resp.ok = True)
        if pad:
            log.warning("Depricated pad parameter referenced in display_results")

        pager = False if config.no_pager else pager

        if resp is not None:
            resp = utils.listify(resp)

            # data = []
            for idx, r in enumerate(resp):
                if len(resp) > 1:
                    _url = r.url if not hasattr(r.url, "path") else r.url.path
                    typer.secho(f"Request {idx + 1} [{r.method}: {_url}] Response:", fg="cyan")
                if not r or tablefmt == "action":
                    fg = "green" if r else "red"

                    typer.secho(str(r), fg=fg)
                    if not r and exit_on_fail:
                        raise typer.Exit(1)
                else:
                    if str(r.rl) != "None":
                        caption = f"{caption} [italic dark_olive_green2]{r.rl}".lstrip()
                    self._display_results(
                        r.output,
                        tablefmt=tablefmt,
                        title=title,
                        caption=caption,
                        pager=pager,
                        outfile=outfile,
                        sort_by=sort_by,
                        reverse=reverse,
                        pad=pad,
                        cleaner=cleaner,
                        **cleaner_kwargs
                    )

        elif data:
            self._display_results(
                data,
                tablefmt=tablefmt,
                title=title,
                caption=caption,
                pager=pager,
                outfile=outfile,
                sort_by=sort_by,
                reverse=reverse,
                pad=pad,
                cleaner=cleaner,
                **cleaner_kwargs
            )
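
A hypothetical call pattern mirroring how the other examples use this method, with cli as an instance of this class and resp coming from cli.central.request; the option values shown are illustrative.

# Hypothetical usage of display_results
resp = cli.central.request(cli.central.delete_site, del_list)
cli.display_results(resp, title="Deleted Sites", tablefmt="rich", exit_on_fail=True)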
Code example #6
def delete(
    what: BatchDelArgs = typer.Argument(...,),
    import_file: Path = typer.Argument(..., exists=True, readable=True),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(
        False, "-d", is_flag=True, help="Use default central account", show_default=False,
    ),
    debug: bool = typer.Option(
        False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging",
    ),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
    ),
) -> None:
    """Perform batch Delete operations using import data from file."""
    yes = yes_ if yes_ else yes
    central = cli.central
    data = config.get_file_data(import_file)
    if hasattr(data, "dict"):  # csv
        data = data.dict

    resp = None
    if what == "sites":
        del_list = []
        _msg_list = []
        for i in data:
            found = False  # track whether this entry yielded a site id
            if isinstance(i, str) and isinstance(data[i], dict):
                i = {"site_name": i, **data[i]} if "name" not in data[i] and "site_name" not in data[i] else data[i]

            if "site_id" not in i and "id" not in i:
                if "site_name" in i or "name" in i:
                    _name = i.get("site_name", i.get("name"))
                    _id = cli.cache.get_site_identifier(_name).id
                    found = True
                    _msg_list += [_name]
                    del_list += [_id]
            else:
                for key in ["site_id", "id"]:
                    if key in i:
                        del_list += [i[key]]
                        _msg_list += [i.get("site_name", i.get("site", i.get("name", f"id: {i[key]}")))]
                        found = True
                        break

            if not found:
                if i.get("site_name", i.get("site", i.get("name"))):
                    site = cli.cache.get_site_identifier(i.get("site_name", i.get("site", i.get("name"))))
                    _msg_list += [site.name]
                    del_list += [site.id]
                    continue
                else:
                    typer.secho("Error getting site ids from import, unable to find required key", fg="red")
                    raise typer.Exit(1)

        if len(_msg_list) > 7:
            _msg_list = [*_msg_list[0:3], "...", *_msg_list[-3:]]
        typer.secho("\nSites to delete:", fg="bright_green")
        typer.echo("\n".join([f"  {m}" for m in _msg_list]))
        if yes or typer.confirm(f"\n{typer.style('Delete', fg='red')} {len(del_list)} sites", abort=True):
            resp = central.request(central.delete_site, del_list)
            if resp:
                cache_del_res = asyncio.run(cli.cache.update_site_db(data=del_list, remove=True))
                if len(cache_del_res) != len(del_list):
                    log.warning(
                        f"Attempt to delete entries from Site Cache returned {len(cache_del_res)} "
                        f"but we tried to delete {len(del_list)} sites.",
                        show=True
                    )

    cli.display_results(resp)
Code example #7
def add(
    what: BatchArgs = typer.Argument(...,),
    import_file: Path = typer.Argument(..., exists=True),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    default: bool = typer.Option(
        False, "-d", is_flag=True, help="Use default central account", show_default=False
    ),
    debug: bool = typer.Option(
        False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging",
    ),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
    ),
) -> None:
    """Perform batch Add operations using import data from file."""
    yes = yes_ if yes_ else yes
    central = cli.central
    data = config.get_file_data(import_file)

    resp = None
    if what == "sites":
        if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
            # TODO Exception handler
            if "address" in str(data.headers) and len(data.headers) > 3:  # address info
                data = [
                    {
                        "site_name": i.get("site_name", i.get("site", i.get("name"))),
                        "site_address": {k: v for k, v in i.items() if k not in ["site", "site_name"]}
                    }
                    for i in data.dict
                ]
            else:  # geoloc
                data = [
                    {
                        "site_name": i.get("site_name", i.get("site", i.get("name"))),
                        "geolocation": {k: v for k, v in i.items() if k not in ["site", "site_name"]}
                    }
                    for i in data.dict
                ]
        site_names = [
            d.get("site_name", "ERROR") for d in data
        ]
        if len(site_names) > 7:
            site_names = [*site_names[0:3], "...", *site_names[-3:]]
        _msg = [
            typer.style("Batch Add Sites:", fg="cyan"),
            typer.style(
                "\n".join([typer.style(f'  {n}', fg="bright_green" if n != "..." else "reset") for n in site_names])
            ),
            typer.style("Proceed with Site Additions?", fg="cyan")
        ]
        _msg = "\n".join(_msg)
        if yes or typer.confirm(_msg, abort=True):
            resp = central.request(central.create_site, site_list=data)
            if resp:
                cache_data = [{k.replace("site_", ""): v for k, v in d.items()} for d in resp.output]
                cache_res = asyncio.run(cli.cache.update_site_db(data=cache_data))
                if len(cache_res) != len(data):
                    log.warning(
                        "Attempted to add entries to Site Cache after batch import.  Cache Response "
                        f"{len(cache_res)} but we added {len(data)} sites.",
                        show=True
                    )

    cli.display_results(resp)
Code example #8
    def get_template_identifier(
        self,
        query_str: str,
        ret_field: str = "name",
        group: str = None,
        retry: bool = True,
        multi_ok: bool = False,
        completion: bool = False,
    ) -> CentralObject:
        """Allows case insensitive template match by template name"""
        retry = False if completion else retry
        match = None
        for _ in range(0, 2 if retry else 1):
            # exact
            match = self.TemplateDB.search((self.Q.name == query_str))

            # case insensitive
            if not match:
                match = self.TemplateDB.search(
                    self.Q.name.test(lambda v: v.lower() == query_str.lower()))

            # case insensitive with -/_ swap
            if not match:
                if "_" in query_str:
                    match = self.TemplateDB.search(
                        self.Q.name.test(lambda v: v.lower() == query_str.
                                         lower().replace("_", "-")))
                elif "-" in query_str:
                    match = self.TemplateDB.search(
                        self.Q.name.test(lambda v: v.lower() == query_str.
                                         lower().replace("-", "_")))

            # startswith
            if not match:
                match = self.TemplateDB.search(
                    self.Q.name.test(
                        lambda v: v.lower().startswith(query_str.lower())))

            if retry and not match and self.central.get_all_templates not in self.updated:
                typer.secho(
                    f"No Match Found for {query_str}, Updating template Cache",
                    fg="red")
                self.check_fresh(refresh=True, template_db=True)
            if match:
                match = [CentralObject("template", tmplt) for tmplt in match]
                break

        if match:
            if completion:
                return match

            if len(match) > 1:
                if group:
                    match = [
                        d for d in match if d.group.lower() == group.lower()
                    ]

            if len(match) > 1:
                match = self.handle_multi_match(
                    match,
                    query_str=query_str,
                    query_type="template",
                    multi_ok=multi_ok,
                )

            return match[0]

        elif retry:
            log.error(
                f"Unable to gather template {ret_field} from provided identifier {query_str}",
                show=True)
            raise typer.Exit(1)
        else:
            if not completion:
                log.warning(
                    f"Unable to gather template {ret_field} from provided identifier {query_str}",
                    show=False)
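
A hypothetical usage of this revised method, with cache standing in for the owning instance; group narrows a multi-match to one group, and completion=True returns the list of CentralObject matches instead of a single object. The template and group names are illustrative.

# Hypothetical usage of get_template_identifier
template = cache.get_template_identifier("branch_ap", group="Branch1")
print(template.name)                                                  # single CentralObject
matches = cache.get_template_identifier("branch", completion=True)    # list of CentralObjects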
Code example #9
def batch_add_sites(import_file: Path, yes: bool = False) -> Response:
    central = cli.central
    name_aliases = ["site-name", "site", "name"]
    _site_aliases = {
        "site-name": "site_name",
        "site": "site_name",
        "name": "site_name",
        "latitude": "lat",
        "longitude": "lon",
        "zipcode": "zip",
    }

    def convert_site_key(_data: dict) -> dict:
        _data = {
            **_data.get("site_address", {}),
            **_data.get("geolocation", {}),
            **{k: v for k, v in _data.items() if k not in ["site_address", "geolocation"]}
        }
        _data = {_site_aliases.get(k, k): v for k, v in _data.items()}
        return _data

    data = config.get_file_data(import_file)
    if "sites" in data:
        data = data["sites"]

    resp = None
    verified_sites: List[SiteImport] = []
    # TODO test with csv ... NOT YET TESTED
    if import_file.suffix in [".csv", ".tsv", ".dbf", ".xls", ".xlsx"]:
        verified_sites = [SiteImport(**convert_site_key(i)) for i in data.dict]
    else:
        # Accept either a list of flat dicts or dicts with the location info nested
        # under "site_address" / "geolocation"; entries can be keyed on the site name
        # or provided flat.
        for i in data:
            if isinstance(i, str) and isinstance(data[i], dict):
                out_dict = convert_site_key(
                    {"site_name": i, **data[i]}
                )
            else:
                out_dict = convert_site_key(i)

            verified_sites += [SiteImport(**out_dict)]

    site_names = [
        f"  [cyan]{s.site_name}[/]" for s in verified_sites
    ]
    if len(site_names) > 7:
        site_names = [*site_names[0:3], "  ...", *site_names[-3:]]

    print("[bright_green]The Following Sites will be created:[/]")
    _ = [print(s) for s in site_names]

    if yes or typer.confirm("Proceed?", abort=True):
        reqs = [
            BatchRequest(central.create_site, **site.dict())
            for site in verified_sites
        ]
        resp = central.batch_request(reqs)
        if all([r.ok for r in resp]):
            resp[-1].output = [r.output for r in resp]
            resp = resp[-1]
            cache_res = asyncio.run(cli.cache.update_site_db(data=resp.output))
            if len(cache_res) != len(data):
                log.warning(
                    "Attempted to add entries to Site Cache after batch import.  Cache Response "
                    f"{len(cache_res)} but we added {len(data)} sites.",
                    show=True
                )
        return resp
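
An illustrative shape for the non-spreadsheet import data this function accepts (the address fields inside each entry are hypothetical; SiteImport defines the keys that are actually accepted). Entries may be keyed on the site name or given as flat dicts, location details may be flat or nested under site_address / geolocation, and convert_site_key normalizes the aliases listed above (site-name, site, name, latitude, longitude, zipcode).

# Hypothetical structure as returned by config.get_file_data() for a YAML/JSON import
data = {
    "sites": {
        "Branch-01": {"address": "123 Main St", "city": "Springfield", "zipcode": "62701"},
        "Branch-02": {"geolocation": {"latitude": "40.7128", "longitude": "-74.0060"}},
    }
}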
Code example #10
    def _display_results(
        self,
        data: Union[List[dict], List[str], dict, None] = None,
        tablefmt: str = "rich",
        title: str = None,
        caption: str = None,
        pager: bool = False,
        outfile: Path = None,
        sort_by: str = None,
        reverse: bool = False,
        stash: bool = True,
        pad: int = None,
        set_width_cols: dict = None,
        full_cols: Union[List[str], str] = [],
        fold_cols: Union[List[str], str] = [],
        cleaner: callable = None,
        **cleaner_kwargs,
    ):
        if data:
            data = utils.listify(data)

            if cleaner and not self.raw_out:
                data = cleaner(data, **cleaner_kwargs)
                data = utils.listify(data)

            if sort_by and all(isinstance(d, dict) for d in data):
                if sort_by not in data[0] and sort_by.replace("_", " ") in data[0]:
                    sort_by = sort_by.replace("_", " ")

                if not all(sort_by in d for d in data):
                    print(
                        f":x: [dark_orange3]Error: [cyan]{sort_by}[reset] does not appear to be a valid field"
                    )
                    print("Valid Fields:\n----------\n{}\n----------".format(
                        "\n".join(data[0].keys())))
                else:
                    try:
                        type_ = str
                        for d in data:
                            if d[sort_by] is not None:
                                type_ = type(d[sort_by])
                                break
                        # sort on the field, substituting a type-appropriate empty value
                        # when the field holds the "-" placeholder
                        data = sorted(
                            data,
                            key=lambda d: d[sort_by] if d[sort_by] != "-"
                            else (0 if type_ == int else ""))
                    except TypeError as e:
                        print(
                            f":x: [dark_orange3]Warning:[reset] Unable to sort by [cyan]{sort_by}.\n   {e.__class__.__name__}: {e} "
                        )

            if reverse:
                data = data[::-1]

            if self.raw_out and tablefmt in ["simple", "rich"]:
                tablefmt = "json"

            # TODO make sure "account" is not valid then remove from list below
            if config.account == "account":
                log.warning("DEV NOTE account is 'account'", show=True)

            kwargs = {
                "outdata": data,
                "tablefmt": tablefmt,
                "title": title,
                "caption": caption,
                "account": (
                    None if config.account in ["central_info", "default", "account"]
                    else config.account
                ),
                "config": config,
                "set_width_cols": set_width_cols,
                "full_cols": full_cols,
                "fold_cols": fold_cols,
            }
            outdata = utils.output(**kwargs)

            if stash:
                config.last_command_file.write_text(
                    json.dumps({k: v for k, v in kwargs.items() if k != "config"}))

            if pager and tty and len(outdata) > tty.rows:
                typer.echo_via_pager(outdata)
            else:
                typer.echo(outdata)

            if "Limit:" not in outdata and caption is not None and cleaner and cleaner.__name__ != "parse_caas_response":
                print(caption)

            if outfile and outdata:
                self.write_file(outfile, outdata.file)
        else:
            log.warning(f"No data passed to _display_output {title} {caption}")