Example #1
def group(
    groups: List[str] = typer.Argument(
        ...,
        help="Group to delete (can provide more than one).",
        autocompletion=cli.cache.group_completion
    ),
    yes: bool = typer.Option(False, "-Y", help="Bypass confirmation prompts - Assume Yes"),
    yes_: bool = typer.Option(False, "-y", hidden=True),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Additional Debug Logging",),
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", show_default=False,),
    account: str = typer.Option("central_info",
                                envvar="ARUBACLI_ACCOUNT",
                                help="The Aruba Central Account to use (must be defined in the config)",),
) -> None:
    yes = yes_ if yes_ else yes
    groups = [cli.cache.get_group_identifier(g) for g in groups]
    reqs = [cli.central.BatchRequest(cli.central.delete_group, (g.name, )) for g in groups]

    _grp_msg = "\n".join([f"  [cyan]{g.name}[/]" for g in groups])
    _grp_msg = _grp_msg.lstrip() if len(groups) == 1 else f"\n{_grp_msg}"
    print(
        f"[bright_red]Delete[/] {'group ' if len(groups) == 1 else 'groups:'}{_grp_msg}"
    )
    if len(reqs) > 1:
        print(f"\n[italic dark_olive_green2]{len(reqs)} API calls will be performed[/]")

    if yes or typer.confirm("\nProceed?", abort=True):
        resp = cli.central.batch_request(reqs)
        cli.display_results(resp, tablefmt="action")
        if resp:
            upd_res = asyncio.run(cli.cache.update_group_db(data=[{"name": g.name} for g in groups], remove=True))
            log.debug(f"cache update to remove deleted groups returns {upd_res}")
Example #2
    def request(self, func: callable, *args, **kwargs) -> Response:
        """non async to async wrapper for all API calls

        Args:
            func (callable): One of the CentralApi methods

        Returns:
            centralcli.response.Response object
        """
        log.debug(f"sending request to {func.__name__} with args {args}, kwargs {kwargs}")
        return asyncio.run(self._request(func, *args, **kwargs))
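
Example #2 is the bridge that lets the synchronous Typer CLI call the async CentralApi coroutines: it simply hands the coroutine to asyncio.run. A self-contained sketch of the same pattern with illustrative names (not centralcli code):

import asyncio

async def fetch_devices(group: str) -> list:
    # stand-in for an async CentralApi method
    await asyncio.sleep(0)
    return [f"{group}-ap1", f"{group}-ap2"]

def request(func, *args, **kwargs):
    """Run an async API method from synchronous code."""
    return asyncio.run(func(*args, **kwargs))

# request(fetch_devices, "branch") -> ['branch-ap1', 'branch-ap2']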
Example #3
    async def _batch_request(self, api_calls: List[BatchRequest],) -> List[Response]:
        self.silent = True
        _tot_start = time.perf_counter()
        resp: Response = await api_calls[0].func(
            *api_calls[0].args,
            **api_calls[0].kwargs
            )
        if not resp or len(api_calls) == 1:
            return [resp]

        m_resp: List[Response] = [resp]

        chunked_calls = utils.chunker(api_calls, 7)

        # remove first call performed above from first chunk
        chunked_calls[0] = chunked_calls[0][1:]

        # Make calls 7 at a time, pacing each chunk so the 7-per-second rate limit is not exceeded
        for chunk in chunked_calls:
            _start = time.perf_counter()

            if chunk != chunked_calls[-1]:
                _br = self.BatchRequest(self.pause, (_start,))
                chunk += [_br]
            m_resp += await asyncio.gather(
                *[call.func(*call.args, **call.kwargs) for call in chunk]
            )
            _elapsed = time.perf_counter() - _start
            log.debug(f"chunk of {len(chunk)} took {_elapsed:.2f}.")

        # strip out the pause/limiter responses (None)
        m_resp = utils.strip_none(m_resp)

        log.debug(f"Batch Requests exec {len(api_calls)} calls, Total time {time.perf_counter() - _tot_start:.2f}")

        self.silent = False

        log.debug(f"API per sec ratelimit as reported by Central: {[r.rl.remain_sec for r in m_resp]}")

        return m_resp
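
utils.chunker is not shown in these snippets. A minimal sketch of such a helper, splitting a list into fixed-size chunks (an assumption about its behavior, not the centralcli code):

from typing import Any, List

def chunker(seq: List[Any], size: int) -> List[List[Any]]:
    """Split seq into consecutive chunks of at most `size` items."""
    return [seq[i:i + size] for i in range(0, len(seq), size)]

# chunker(list(range(10)), 7) -> [[0, 1, 2, 3, 4, 5, 6], [7, 8, 9]]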
Example #4
def vscode_arg_handler():

    def get_arguments_from_import(import_file: str, key: str = None) -> list:
        """Get arguments from default import_file (stored_tasks.yaml)

        Args:
            import_file (str): name of import file
            key (str, optional): return single value for specific key if provided. Defaults to None.

        Returns:
            list: updated sys.argv list.
        """
        # args = utils.read_yaml(import_file)
        args = config.get_file_data(Path(import_file))
        if key and key in args:
            args = args[key]

        sys.argv += args

        return sys.argv

    try:
        if len(sys.argv) > 1:
            if " " in sys.argv[1] or not sys.argv[1]:
                vsc_args = sys.argv.pop(1)
                if vsc_args:
                    if "\\'" in vsc_args:
                        _loc = vsc_args.find("\\'")
                        _before = vsc_args[:_loc - 1]
                        _before = _before.split()
                        _str_end = vsc_args.find("\\'", _loc + 1)
                        sys.argv += [i.rstrip(',') for i in _before if i != ',']
                        sys.argv += [f"{vsc_args[_loc + 2:_str_end]}"]
                        _the_rest = vsc_args[_str_end + 2:].split()
                        sys.argv += [i.rstrip(',') for i in _the_rest if i != ',']
                    else:
                        sys.argv += vsc_args.split()

        if len(sys.argv) > 2:
            _import_file, _import_key = None, None
            if sys.argv[2].endswith((".yaml", ".yml", ".json")):
                _import_file = sys.argv.pop(2)
                if not utils.valid_file(_import_file):
                    if utils.valid_file(config.dir.joinpath(_import_file)):
                        _import_file = config.dir.joinpath(_import_file)

                if len(sys.argv) > 2:
                    _import_key = sys.argv.pop(2)

                sys.argv = get_arguments_from_import(_import_file, key=_import_key)

    except Exception as e:
        log.exception(f"Exception in vscode arg handler (arg split) {e.__class__.__name__}.{e}", show=True)
        return

    # update launch.json default if launched by vscode debugger
    try:
        # Update prev_args history file
        history_lines = None
        history_file = config.base_dir / ".vscode" / "prev_args"
        this_args = " ".join(sys.argv[1:])
        if not this_args:
            return

        if history_file.is_file() and this_args.strip():
            history_lines = history_file.read_text().splitlines()

            if this_args in history_lines:
                _ = history_lines.pop(history_lines.index(this_args))
                history_lines.insert(0, _)
            else:
                history_lines.insert(0, this_args)
                if len(history_lines) > 10:
                    _ = history_lines.pop(10)
            history_file.write_text("\n".join(history_lines) + "\n")

        # update launch.json default arg
        do_update = False
        launch_data = None
        launch_file = config.base_dir / ".vscode" / "launch.json"
        launch_file_bak = config.base_dir / ".vscode" / "launch.json.bak"
        if launch_file.is_file():
            launch_data = launch_file.read_text()
            launch_data = launch_data.splitlines()
            for idx, line in enumerate(launch_data):
                if "default" in line and "// VSC_PREV_ARGS" in line:
                    _spaces = len(line) - len(line.lstrip(" "))
                    new_line = f'{" ":{_spaces}}"default": "{this_args}"  // VSC_PREV_ARGS'
                    if line != new_line:
                        do_update = True
                        log.debug(f"changing default arg for promptString:\n"
                                  f"\t from: {line}\n"
                                  f"\t to: {new_line}"
                                  )
                        launch_data[idx] = new_line

                elif history_lines and "options" in line and "// VSC_ARG_HISTORY" in line:
                    import json
                    _spaces = len(line) - len(line.lstrip(" "))
                    new_line = f'{" ":{_spaces}}"options": {json.dumps(history_lines)},  // VSC_ARG_HISTORY'
                    if line != new_line:
                        do_update = True
                        log.debug(f"changing options arg for pickString:\n"
                                  f"\t from: {line}\n"
                                  f"\t to: {new_line}"
                                  )
                        launch_data[idx] = new_line

        if do_update and launch_data:
            # backup launch.json only if backup doesn't exist already
            if not launch_file_bak.is_file():
                import shutil
                shutil.copy(launch_file, launch_file_bak)

            # update launch.json
            launch_file.write_text("\n".join(launch_data) + "\n")

    except Exception as e:
        log.exception(f"Exception in vscode arg handler (launch.json update) {e.__class__.__name__}.{e}", show=True)
Example #5
def method(
    method: str = typer.Argument(..., autocompletion=cli.cache.method_test_completion),
    kwargs: List[str] = typer.Argument(None),
    _help: bool = typer.Option(False, "--doc", help="Get details on required args/keyword args for provided method."),
    do_json: bool = typer.Option(False, "--json", is_flag=True, help="Output in JSON", show_default=False),
    do_yaml: bool = typer.Option(False, "--yaml", is_flag=True, help="Output in YAML", show_default=False),
    do_csv: bool = typer.Option(False, "--csv", is_flag=True, help="Output in CSV", show_default=False),
    do_table: bool = typer.Option(False, "--table", is_flag=True, help="Output in Table", show_default=False),
    outfile: Path = typer.Option(None, help="Output to file (and terminal)", writable=True),
    pager: bool = typer.Option(False, help="Enable Paged Output"),
    update_cache: bool = typer.Option(False, "-U", hidden=True),  # Force Update of cache for testing
    default: bool = typer.Option(False, "-d", is_flag=True, help="Use default central account", show_default=False,
                                 callback=cli.default_callback),
    debug: bool = typer.Option(False, "--debug", envvar="ARUBACLI_DEBUG", help="Enable Debug Logging",
                               callback=cli.debug_callback),
    debugv: bool = typer.Option(
        False, "--debugv",
        envvar="ARUBACLI_VERBOSE_DEBUG",
        help="Enable verbose Debug Logging",
        hidden=True,
        callback=cli.verbose_debug_callback,
    ),
    account: str = typer.Option(
        "central_info",
        envvar="ARUBACLI_ACCOUNT",
        help="The Aruba Central Account to use (must be defined in the config)",
        autocompletion=cli.cache.account_completion,
    ),
) -> None:
    """Dev testing commands to run CentralApi methods from command line.

    Refer to central.py and the schema generated code in the boilerplate dir
    for available calls.  Tab completion will also return available methods.
    Use --doc to get details on arguments for the provided method.

    Args:
        method (str, optional): CentralAPI method to test.
        kwargs (List[str], optional): list of args kwargs to pass to function.

    format: arg1 arg2 keyword=value keyword2=value
        or  arg1, arg2, keyword = value, keyword2=value

    Examples: cencli test method platform_get_devices all_ap

        Use --doc flag to see documentation for usage of a method.
        cencli test method platform_get_devices --doc

    Displays all attributes of Response object
    """
    # FIXME account only works if method is in central.py
    central = CentralApi(account)
    cli.cache(refresh=update_cache)
    if not hasattr(central, method):
        if account != "central_info":
            print("Testing methods only supports the --account option for methods in central.py")
            raise typer.Exit(1)
        bpdir = Path(__file__).parent / "boilerplate"
        all_calls = [
            importlib.import_module(f"centralcli.{bpdir.name}.{f.stem}") for f in bpdir.iterdir()
            if not f.name.startswith("_") and f.suffix == ".py"
        ]
        for m in all_calls:
            log.debug(f"Looking for {method} in {m.__file__.split('/')[-1]}")
            if hasattr(m.AllCalls(), method):
                central = m.AllCalls()
                break

    if not hasattr(central, method):
        typer.secho(f"{method} does not exist", fg="red")
        raise typer.Exit(1)

    if _help:
        old_ret = "Response: CentralAPI Response object"
        new_ret = "Response from Aruba Central API gateway."
        print(getattr(central, method).__doc__.replace(old_ret, new_ret))
        raise typer.Exit(0)

    kwargs = (
        "~".join(kwargs).replace("'", "").replace('"', '').replace("~=", "=").replace("=~", "=").replace(",~", "~").split("~")
    )
    args = [k if not k.isdigit() else int(k) for k in kwargs if k and "=" not in k]
    kwargs = [k.split("=") for k in kwargs if "=" in k]
    kwargs = {k[0]: k[1] if not k[1].isdigit() else int(k[1]) for k in kwargs}
    for arg in args:
        if isinstance(arg, str):
            if arg.startswith("[") and arg.endswith("]"):
                args[args.index(arg)] = [a if not a.isdigit() else int(a) for a in arg.strip("[]").split(",")]
    for k, v in kwargs.items():
        if isinstance(v, str):
            if v.startswith("[") and v.endswith("]"):
                kwargs[k] = [vv if not vv.isdigit() else int(vv) for vv in v.strip("[]").split(",")]
            if v.lower() in ["true", "false"]:
                kwargs[k] = True if v.lower() == "true" else False

    from rich.console import Console
    c = Console(file=outfile)

    req = (
        f"central.{method}({', '.join(str(a) for a in args)}{', ' if args else ''}"
        f"{', '.join([f'{k}={kwargs[k]}' for k in kwargs]) if kwargs else ''})"
    )

    resp = central.request(getattr(central, method), *args, **kwargs)
    if "should be str" in resp.output and "bool" in resp.output:
        c.log(f"{resp.output}.  LAME!  Converting to str!")
        args = tuple([str(a).lower() if isinstance(a, bool) else a for a in args])
        kwargs = {k: str(v).lower() if isinstance(v, bool) else v for k, v in kwargs.items()}
        resp = central.request(getattr(central, method), *args, **kwargs)

    attrs = {
        k: v for k, v in resp.__dict__.items() if k not in ["output", "raw"] and (log.DEBUG or not k.startswith("_"))
    }

    c.print(req)
    c.print("\n".join([f"  {k}: {v}" for k, v in attrs.items()]))

    tablefmt = cli.get_format(
        do_json, do_yaml, do_csv, do_table, default="yaml"
    )

    if resp.raw and resp.output != resp.raw:
        typer.echo(f"\n{typer.style('CentralCLI Response Output', fg='bright_green')}:")
        cli.display_results(data=resp.output, tablefmt=tablefmt, pager=pager, outfile=outfile)
    if resp.raw:
        typer.echo(f"\n{typer.style('Raw Response Output', fg='bright_green')}:")
        cli.display_results(data=resp.raw, tablefmt="json", pager=pager, outfile=outfile)
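
Example #5 turns free-form tokens such as `platform_get_devices all_ap limit=5` into positional args and keyword args, converting digit strings to int. A simplified rework of that parsing for illustration (not the original code, which also handles quoting and spaces around '='):

from typing import Any, Dict, List, Tuple

def parse_tokens(tokens: List[str]) -> Tuple[List[Any], Dict[str, Any]]:
    """Split CLI tokens into positional args and keyword args (simplified)."""
    args: List[Any] = []
    kwargs: Dict[str, Any] = {}
    for tok in tokens:
        if "=" in tok:
            key, val = tok.split("=", 1)
            kwargs[key] = int(val) if val.isdigit() else val
        else:
            args.append(int(tok) if tok.isdigit() else tok)
    return args, kwargs

# parse_tokens(["all_ap", "limit=5"]) -> (['all_ap'], {'limit': 5})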
Example #6
def batch_add_groups(import_file: Path, yes: bool = False) -> List[Response]:
    console = Console(emoji=False)
    br = cli.central.BatchRequest
    data = config.get_file_data(import_file)
    # TODO handle csv
    if isinstance(data, dict) and "groups" in data:
        data = data["groups"]
    reqs, gw_reqs, ap_reqs = [], [], []
    pre_cfgs = []
    _pre_config_msg = ""
    cache_data = []
    for group in data:
        if "allowed-types" in data[group]:
            data[group]["allowed_types"] = data[group]["allowed-types"]
            del data[group]["allowed-types"]

        try:
            g = GroupImport(**{"group": group, **data[group]})
        except ValidationError as e:
            print(e)
            raise typer.Exit(1)
        reqs += [
            br(
                cli.central.create_group,
                g.group,
                allowed_types=g.allowed_types,
                wired_tg=g.wired_tg,
                wlan_tg=g.wlan_tg,
                aos10=g.aos10,
                microbranch=g.microbranch,
                gw_role=g.gw_role,
                monitor_only_sw=g.monitor_only_sw,
                monitor_only_cx=g.monitor_only_cx,
            )
        ]
        cache_data += [
            {"name": g.group, "template group": {"Wired": g.wired_tg, "Wireless": g.wlan_tg}}
        ]
        for dev_type, cfg_file, var_file in zip(["gw", "ap"], [g.gw_config, g.ap_config], [g.gw_vars, g.ap_vars]):
            if cfg_file is not None:
                pc = _build_pre_config(g.group, dev_type=dev_type, cfg_file=cfg_file, var_file=var_file)
                pre_cfgs += [pc]
                _pre_config_msg += (
                    f"  [bright_green]{len(pre_cfgs)}[/]. [cyan]{g.group}[/] {'gateways' if dev_type == 'gw' else 'AP'} "
                    f"group level will be configured based on [cyan]{cfg_file.name}[/]\n"
                )
                if dev_type == "gw":
                    gw_reqs += [pc.request]
                else:
                    ap_reqs += [pc.request]

    print(f"[bright_green]The following groups will be created:[/]")
    _ = [print(f"  [cyan]{g}[/]") for g in data]

    _pre_config_msg = (
        "\n[bright_green]Group level configurations will be sent:[/]\n"
        f"{_pre_config_msg}"
        f"\n[italic dark_olive_green2]{len(reqs) + len(gw_reqs) + len(ap_reqs)} API calls will be performed.[/]\n"
    )
    print(_pre_config_msg)
    for idx in range(len(pre_cfgs) + 1):
        if idx > 0:
            print(_pre_config_msg)
        print(f"Select [bright_green]#[/] to display config to be sent or [bright_green]go[/] to continue.")
        ch = utils.ask(
            ">",
            console=console,
            choices=[*[str(idx) for idx in range(1, len(pre_cfgs) + 1)], "abort", "go"],
        )
        if ch.lower() == "go":
            yes = True
            break
        else:
            pc: PreConfig = pre_cfgs[int(ch) - 1]
            console.rule(f"Config to be sent to {pc.name}")
            with console.pager():
                console.print(pc.config)
            console.rule(f" End {pc.name} config ")

    if reqs and (yes or typer.confirm("Proceed?", abort=True)):
        resp = cli.central.batch_request(reqs)
        if all(r.ok for r in resp):
            cache_resp = asyncio.run(cli.cache.update_group_db(cache_data))
            log.debug(f"batch add group cache resp: {cache_resp}")
        cli.display_results(resp)
        if gw_reqs:
            print("\n[bright_green]Results from Group level gateway config push (CLI commands)[/]")
            print("\n  [italic]This can take some time.[/]")
            resp = cli.central.batch_request(gw_reqs)
            cli.display_results(resp, cleaner=cleaner.parse_caas_response)
        if ap_reqs:
            print("\n[bright_green]Results from Group level AP config push (Replaces entire group level)[/]\n")
            resp = cli.central.batch_request(ap_reqs)
            cli.display_results(resp, tablefmt="action")
Example #7
            f"         {rtxt} 'SNAN'\n"
            "      '%H-1%'    will split the hostname into parts separating on '-' and use\n"
            f"         the firt segment.  {rtxt} 'SNANTX\n"
            f"      '%p%'    represents the interface.  {rtxt} '7'\n"
            "                   note: an interface in the form 1/1/12 is converted to 1_1_12\n"
            "       '%p/3%    seperates the port string on / and uses the 3rd segment.\n"
            "        '%m% or %m[-4] = last 4 digits of the AP MAC\n"
            "        '%m:1% would split on : and take the 1st segment.\n")
        fstr = typer.prompt("Enter Desired format string:")
        do_lldp_rename(fstr)
    else:
        typer.secho(
            "import file Argument is required if --lldp flag not provided",
            fg="red")

    cli.display_results(resp)


@app.callback()
def callback():
    """
    Perform batch operations.
    """
    pass


log.debug(f'{__name__} called with Arguments: {" ".join(sys.argv)}')

if __name__ == "__main__":
    app()
Example #8
    async def api_call(self,
                       url: str,
                       data: dict = None,
                       json_data: Union[dict, list] = None,
                       method: str = "GET",
                       headers: dict = {},
                       params: dict = {},
                       callback: callable = None,
                       callback_kwargs: Any = {},
                       **kwargs: Any) -> Response:

        # Debugging flag to lower paging limit to test paging with smaller chunks.
        if params and params.get("limit") and config.limit:
            log.info(
                f'paging limit being overridden by config: {params.get("limit")} --> {config.limit}'
            )
            params["limit"] = config.limit  # for debugging can set a smaller limit in config to test paging

        # Allow passing of default kwargs (None) for params/json_data; all keys with a None value are stripped here.
        # Supports 2 levels of nesting; anything deeper needs to be handled in the calling method.
        params = utils.strip_none(params)
        json_data = utils.strip_none(json_data)
        if json_data:  # strip second nested dict if all keys = NoneType
            y = json_data.copy()
            for k in y:
                if isinstance(y[k], dict):
                    y[k] = utils.strip_none(y[k])
                    if not y[k]:
                        del json_data[k]

        # Output pagination loop
        paged_output = None
        while True:
            # -- // Attempt API Call \\ --
            r = await self.exec_api_call(url,
                                         data=data,
                                         json_data=json_data,
                                         method=method,
                                         headers=headers,
                                         params=params,
                                         **kwargs)

            if not r.ok:
                break

            # data cleaner methods to strip any useless columns, change key names, etc.
            elif callback is not None:
                # TODO [remove] moving callbacks to display output in cli, leaving methods to return raw output
                log.debug(
                    f"DEV NOTE CALLBACK IN centralapi lib {url} -> {callback}")
                r.output = callback(r.output, **callback_kwargs or {})

            # -- // paging \\ --
            if not paged_output:
                paged_output = r.output
            else:
                if isinstance(r.output, dict):
                    paged_output = {**paged_output, **r.output}
                else:
                    paged_output += r.output

            _limit = params.get("limit", 0)
            _offset = params.get("offset", 0)
            if params.get("limit") and len(r.output) == _limit:
                params["offset"] = _offset + _limit
            else:
                r.output = paged_output
                break

        return r
Example #9
    async def exec_api_call(self,
                            url: str,
                            data: dict = None,
                            json_data: Union[dict, list] = None,
                            method: str = "GET",
                            headers: dict = {},
                            params: dict = {},
                            **kwargs) -> Response:
        auth = self.auth
        resp, spin = None, None
        _data_msg = ' ' if not url else f' [{url.split("arubanetworks.com/")[-1]}]'
        spin_txt_run = "Collecting Data..."
        spin_txt_fail = f"Collecting Data{_data_msg}"
        for _ in range(0, 2):
            if _ > 0:
                spin_txt_run += f" retry {_}"

            log.debug(
                f"Attempt API Call to:{_data_msg}Try: {_ + 1}\n"
                f"    access token: {auth.central_info.get('token', {}).get('access_token', {})}\n"
                f"    refresh token: {auth.central_info.get('token', {}).get('refresh_token', {})}"
            )

            try:
                with Halo(spin_txt_run, enabled=bool(utils.tty)) as spin:
                    _start = time.time()
                    headers = self.headers if not headers else {
                        **self.headers,
                        **headers
                    }
                    # -- // THE API REQUEST \\ --
                    resp = await self.aio_session.request(method=method,
                                                          url=url,
                                                          params=params,
                                                          data=data,
                                                          json=json_data,
                                                          headers=headers,
                                                          ssl=self.ssl,
                                                          **kwargs)

                    elapsed = time.time() - _start

                    try:
                        output = await resp.json()
                        try:
                            raw_output = output.copy()
                        except AttributeError:
                            raw_output = output
                        output = cleaner.strip_outer_keys(output)
                    except (json.decoder.JSONDecodeError, ContentTypeError):
                        output = raw_output = await resp.text()

                resp = Response(resp,
                                output=output,
                                raw=raw_output,
                                elapsed=elapsed)
            except Exception as e:
                resp = Response(error=str(e), url=url)
                _ += 1

            fail_msg = spin_txt_fail if self.silent else f"{spin_txt_fail}\n  {resp.output}"
            if not resp:
                spin.fail(fail_msg)
                if "invalid_token" in resp.output:
                    self.refresh_token()
                else:
                    log.error(
                        f"API [{method}] {url} Error Returned: {resp.error}")
                    break
            else:
                # spin.succeed()
                spin.stop()
                break

        return resp
Example #10
    async def pause(start: float) -> None:
        _elapsed = time.perf_counter() - start
        _pause = (int(_elapsed) + 1) - _elapsed  # sleep until the next whole second
        log.debug(f"PAUSE {_pause:.2f}s...")
        await asyncio.sleep(_pause)  # non-blocking sleep so the rest of the chunk can run concurrently
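
The pause helper above sleeps for the remainder of the current one-second window, so a chunk of up to 7 calls plus its pause is intended to span at least a full second. A quick numeric illustration of the rounding:

# If 0.35s have elapsed since the chunk started, pause() sleeps the rest of that second:
elapsed = 0.35
pause_for = (int(elapsed) + 1) - elapsed   # (0 + 1) - 0.35 = 0.65
# 0.35s of work + 0.65s of pause ≈ 1.0s per chunk, keeping the 7-calls-per-second budget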
Example #11
    async def api_call(self, url: str, data: dict = None, json_data: Union[dict, list] = None,
                       method: str = "GET", headers: dict = {}, params: dict = {}, callback: callable = None,
                       callback_kwargs: Any = {}, count: int = None, **kwargs: Any) -> Response:

        # TODO cleanup: if we do strip_none here, it can be removed from the calling funcs.
        params = utils.strip_none(params)

        # Debugging flag to lower paging limit to test paging with smaller chunks.
        if params and params.get("limit") and config.limit:
            log.info(f'paging limit being overridden by config: {params.get("limit")} --> {config.limit}')
            params["limit"] = config.limit  # for debugging can set a smaller limit in config to test paging

        # Allow passing of default kwargs (None) for params/json_data; all keys with a None value are stripped here.
        # Supports 2 levels of nesting; anything deeper needs to be handled in the calling method.
        json_data = utils.strip_none(json_data)
        if json_data:  # strip second nested dict if all keys = NoneType
            y = json_data.copy()
            for k in y:
                if isinstance(y, dict) and isinstance(y[k], dict):
                    y[k] = utils.strip_none(y[k])
                    if not y[k]:
                        del json_data[k]

        # Output pagination loop
        paged_output = None
        paged_raw = None
        while True:
            # -- // Attempt API Call \\ --
            r = await self.exec_api_call(url, data=data, json_data=json_data, method=method, headers=headers,
                                         params=params, **kwargs)
            if not r.ok:
                break

            # data cleaner methods to strip any useless columns, change key names, etc.
            elif callback is not None:
                # TODO [remove] moving callbacks to display output in cli, leaving methods to return raw output
                log.debug(f"DEV NOTE CALLBACK IN centralapi lib {r.url.path} -> {callback}")
                r.output = callback(r.output, **callback_kwargs or {})

            # -- // paging \\ --
            if not paged_output:
                paged_output = r.output
            else:
                if isinstance(r.output, dict):
                    paged_output = {**paged_output, **r.output}
                else:  # FIXME paged_output += r.output also changed the contents of paged_raw, unclear why
                    paged_output = paged_output + r.output

            if not paged_raw:
                paged_raw = r.raw
            else:
                if isinstance(r.raw, dict):
                    for outer_key in constants.STRIP_KEYS:
                        if outer_key in r.raw and outer_key in paged_raw:
                            if isinstance(r.raw[outer_key], dict):
                                paged_raw[outer_key] = {**paged_raw[outer_key], **r.raw[outer_key]}
                            else:  # TODO use a Response magic method for the adds; have Response figure this out
                                paged_raw[outer_key] += r.raw[outer_key]
                            break
                else:
                    paged_raw += r.raw

            _limit = params.get("limit", 0)
            _offset = params.get("offset", 0)
            if params.get("limit") and len(r.output) == _limit:
                if count and len(paged_output) >= count:
                    r.output = paged_output
                    r.raw = paged_raw
                    break
                elif count and len(paged_output) < count:
                    next_limit = count - len(paged_output)
                    next_limit = _limit if next_limit > _limit else next_limit
                    params["offset"] = _offset + next_limit
                else:
                    params["offset"] = _offset + _limit
            else:
                r.output = paged_output
                r.raw = paged_raw
                break

        return r
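
Examples #8 and #11 page through results by bumping offset by limit until a short page comes back (Example #11 additionally caps the total with count). The same offset/limit pattern in isolation against a stand-in fetch function (illustrative, not the centralcli API):

from typing import Any, Callable, List

def get_all_pages(fetch: Callable[[int, int], List[Any]], limit: int = 100) -> List[Any]:
    """Accumulate pages until a page shorter than `limit` signals the end of the data."""
    results: List[Any] = []
    offset = 0
    while True:
        page = fetch(offset, limit)
        results += page
        if len(page) < limit:  # short (or empty) page -> no more data
            return results
        offset += limit

# data = list(range(250))
# get_all_pages(lambda off, lim: data[off:off + lim], limit=100)  # -> all 250 items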
Example #12
    async def exec_api_call(self, url: str, data: dict = None, json_data: Union[dict, list] = None,
                            method: str = "GET", headers: dict = {}, params: dict = {}, **kwargs) -> Response:
        auth = self.auth
        resp = None
        # _url = URL(url).with_query({k: v for k, v in params.items() if k in {"offset", "limit"}})
        _url = URL(url).with_query(params)
        _data_msg = ' ' if not url else f' [{_url.path}]'
        run_sfx = '' if self.req_cnt == 1 else f' Request: {self.req_cnt}'
        spin_word = "Collecting" if method == "GET" else "Sending"
        spin_txt_run = f"{spin_word} Data...{run_sfx}"
        spin_txt_retry = ""
        spin_txt_fail = f"{spin_word} Data{_data_msg}"
        self.spinner.text = spin_txt_run
        for _ in range(0, 2):
            if _ > 0:
                spin_txt_run = f"{spin_txt_run} {spin_txt_retry}".rstrip()

            token_msg = (
                f"\n    access token: {auth.central_info.get('token', {}).get('access_token', {})}"
                f"\n    refresh token: {auth.central_info.get('token', {}).get('refresh_token', {})}"
            )
            log.debug(
                f"Attempt API Call to:{_data_msg}Try: {_ + 1}{token_msg if self.req_cnt == 1 else ''}"
            )
            if config.debugv:
                call_data = {
                    "method": method,
                    "url": url,
                    "url_params": params,
                    "data": data,
                    "json_data": json_data,
                }
                if kwargs:
                    call_data["Additional kwargs"] = kwargs
                print("[bold magenta]VERBOSE DEBUG[reset]")
                call_data = utils.strip_none(call_data, strip_empty_obj=True)
                utils.json_print(call_data)

            headers = self.headers if not headers else {**self.headers, **headers}
            try:
                req_log = LoggedRequests(_url.path_qs, method)

                # -- // THE API REQUEST \\ --
                #  -- // RATE LIMIT TEST \\ --
                # await self.wait_for_token()
                _start = time.monotonic()
                now = time.monotonic() - INIT_TS
                _try_cnt = [u.url for u in self.requests].count(_url.path_qs) + 1
                self.rl_log += [
                    f'{now:.2f} [{method}]{_url.path_qs} Try: {_try_cnt}'
                ]
                #  -- // RATE LIMIT TEST \\ --
                self.spinner.start(spin_txt_run)
                self.req_cnt += 1
                # TODO move batch_request, _batch_request, get, put, etc. into Session
                # change where the client is instantiated to _request / _batch_request; pass in the client
                # remove the aio_session property; call ClientSession() directly
                async with self.aio_session as client:
                    resp = await client.request(
                        method=method,
                        url=url,
                        params=params,
                        data=data,
                        json=json_data,
                        headers=headers,
                        ssl=self.ssl,
                        **kwargs
                    )
                    elapsed = time.monotonic() - _start
                    self.requests += [req_log.update(resp)]

                    try:
                        output = await resp.json()
                        try:
                            raw_output = output.copy()
                        except AttributeError:
                            raw_output = output

                        # Strip outer key sent by central
                        output = cleaner.strip_outer_keys(output)
                    except (json.decoder.JSONDecodeError, ContentTypeError):
                        output = raw_output = await resp.text()

                    resp = Response(resp, output=output, raw=raw_output, elapsed=elapsed)

            except Exception as e:
                resp = Response(error=str(e), url=_url.path_qs)
                _ += 1

            fail_msg = spin_txt_fail if self.silent else f"{spin_txt_fail}\n  {resp.output}"
            if not resp:
                self.spinner.fail(fail_msg)
                if "invalid_token" in resp.output:
                    spin_txt_retry = "(retry after token refresh)"
                    self.refresh_token()
                elif resp.status == 429:  # per second rate limit.
                    spin_txt_retry = "(retry after hitting per second rate limit)"
                    self.rl_log += [f"{now:.2f} [:warning: [bright_red]RATE LIMIT HIT[/]] p/s: {resp.rl.remain_sec}: {_url.path_qs}"]
                    _ -= 1
                else:
                    break
            else:
                if resp.rl.near_sec:
                    self.rl_log += [
                        f"{time.monotonic() - INIT_TS:.2f} [[bright_green]{resp.error}[/] but [dark_orange3]NEARING RATE LIMIT[/]] p/s: {resp.rl.remain_sec} {_url.path_qs}"
                    ]
                else:
                    self.rl_log += [
                        f"{time.monotonic() - INIT_TS:.2f} [[bright_green]{resp.error}[/]] p/s: {resp.rl.remain_sec} {_url.path_qs}"
                    ]
                self.spinner.stop()
                break

        return resp