Example #1
0
    def get_adapters_via_api(self,
                             ip: str,
                             port: int = 5000,
                             rename: bool = False,
                             log_host: str = None):
        """Send RestFul GET request to Remote ConsolePi to collect adapter info

        params:
        ip(str): ip address or FQDN of remote ConsolePi
        port(int): API port on the remote ConsolePi (default: 5000)
        rename(bool): when True append ?refresh=true so the remote refreshes
            its adapter data before responding
        log_host(str): friendly string for logging purposes "hostname(ip)"

        returns:
        adapter dict for remote if successful and adapters exist
        status_code 200 if successful but no adapters or Falsey or response status_code if an error occurred.
        """
        if not log_host:
            log_host = ip
        url = f"http://{ip}:{port}/api/v1.0/adapters"
        if rename:
            url = f"{url}?refresh=true"

        log.debug(url)

        headers = {
            "Accept": "*/*",
            "Cache-Control": "no-cache",
            "Host": f"{ip}:{port}",
            "accept-encoding": "gzip, deflate",
            "Connection": "keep-alive",
            "cache-control": "no-cache",
        }

        try:
            response = requests.request("GET",
                                        url,
                                        headers=headers,
                                        timeout=config.remote_timeout)
        except (OSError, TimeoutError):
            log.warning(
                f"[API RQST OUT] Remote ConsolePi: {log_host} TimeOut when querying via API - Unreachable."
            )
            return False

        if response.ok:
            # .get() guards against a response body lacking the "adapters" key
            # (direct indexing would raise KeyError); falsy/absent adapters
            # fall back to the status code per the documented contract.
            ret = response.json().get("adapters") or response.status_code
            _msg = f"Adapters Successfully retrieved via API for Remote ConsolePi: {log_host}"
            log.info("[API RQST OUT] {}".format(_msg))
            log.debugv("[API RQST OUT] Response: \n{}".format(
                json.dumps(ret, indent=4, sort_keys=True)))
        else:
            ret = response.status_code
            log.error(
                f"[API RQST OUT] Failed to retrieve adapters via API for Remote ConsolePi: {log_host}\n{ret}:{response.text}"
            )
        return ret
Example #2
0
    def get_adapters_via_api(self, ip: str, rename: bool = False):
        """Query a remote ConsolePi's REST API for its adapter details.

        params:
        ip(str): ip address or FQDN of remote ConsolePi
        rename(bool): when True append ?refresh=true to the request URL

        returns:
        adapter dict for remote if successful
        Falsey or response status_code if an error occurred.
        """
        url = f"http://{ip}:5000/api/v1.0/adapters"
        if rename:
            url = f"{url}?refresh=true"
        log.info(url)  # DEBUG

        headers = {
            "Accept": "*/*",
            "Cache-Control": "no-cache",
            "Host": f"{ip}:5000",
            "accept-encoding": "gzip, deflate",
            "Connection": "keep-alive",
            "cache-control": "no-cache",
        }

        try:
            response = requests.get(url, headers=headers,
                                    timeout=config.remote_timeout)
        except (OSError, TimeoutError):
            log.warning(
                f"[API RQST OUT] Remote ConsolePi @ {ip} TimeOut when querying via API - Unreachable.")
            return False

        if not response.ok:
            ret = response.status_code
            log.error(
                f"[API RQST OUT] Failed to retrieve adapters via API for Remote ConsolePi @ {ip}\n{ret}:{response.text}")
            return ret

        payload = response.json()
        ret = payload["adapters"] or response.status_code
        log.info(
            f"[API RQST OUT] Adapters Successfully retrieved via API for Remote ConsolePi @ {ip}")
        log.debugv(
            f"[API RQST OUT] Response: \n{json.dumps(ret, indent=4, sort_keys=True)}")
        return ret
Example #3
0
    def do_api_request(self, ip: str, path: str, *args, **kwargs):
        """GET an arbitrary endpoint on a remote ConsolePi's REST API.

        params:
        ip(str): ip address or FQDN of remote ConsolePi
        path(str): path beyond /api/v1.0/

        returns:
        response object (False if the remote was unreachable)
        """
        url = f"http://{ip}:5000/api/v1.0/{path}"
        log.debug(f'[do_api_request] URL: {url}')

        headers = {
            "Accept": "*/*",
            "Cache-Control": "no-cache",
            "Host": f"{ip}:5000",
            "accept-encoding": "gzip, deflate",
            "Connection": "keep-alive",
            "cache-control": "no-cache",
        }

        try:
            response = requests.get(url, headers=headers,
                                    timeout=config.remote_timeout)
        except (OSError, TimeoutError):
            log.warning(
                f"[API RQST OUT] Remote ConsolePi @ {ip} TimeOut when querying via API - Unreachable.")
            return False

        if not response.ok:
            log.error(f"[API RQST OUT] API Request Failed {url}")
            return response

        log.info(f"[API RQST OUT] {url} Response: OK")
        log.debugv(
            f"[API RQST OUT] Response: \n{json.dumps(response.json(), indent=4, sort_keys=True)}")
        return response
Example #4
0
    def get_if_info(self):
        '''Build and return dict with interface info.

        returns:
            dict: {iface: {'ip': ..., 'mac': ..., 'isgw': bool}} for every
                non-loopback / non-docker interface that has an IPv4 address,
                plus '_ip_w_gw' mapped to the ip of the default-gateway iface
                (None if that iface has no usable IPv4 address).
        '''
        if_list = [
            i for i in ni.interfaces() if i != 'lo' and 'docker' not in i
        ]
        # interface holding the default IPv4 route (None when no default gw)
        if_w_gw = ni.gateways()['default'].get(ni.AF_INET, {1: None})[1]

        if_data = {}
        for _if in if_list:
            addrs = ni.ifaddresses(_if)  # hoisted: was queried 3x per iface
            ip = addrs.get(ni.AF_INET, {0: {}})[0].get('addr')
            if not ip:  # skip interfaces with no IPv4 address
                continue
            if_data[_if] = {
                'ip': ip,
                'mac': addrs.get(ni.AF_LINK, {0: {}})[0].get('addr'),
                'isgw': _if == if_w_gw,
            }

        if_data['_ip_w_gw'] = if_data.get(if_w_gw, {'ip': None})['ip']
        log.debugv('[GET IFACES] Completed Iface Data: {}'.format(if_data))
        return if_data
Example #5
0
    def update_files(self, data):
        """Sync this ConsolePi's data to the Gdrive spreadsheet and collect remotes.

        params:
            data (dict): {hostname: {...}} payload for this ConsolePi.  Any
                'udev' details are stripped from the adapter entries before
                upload (local-only detail, not published to the sheet).

        returns:
            dict: remote ConsolePis found in the cloud config (every row whose
                column-A hostname differs from this host's), or an error string
                starting with 'Gdrive-Error:' if Gdrive auth fails.
        """
        # udev data is local-only; don't publish it to the spreadsheet
        for x in data[self.hostname]['adapters']:
            if 'udev' in data[self.hostname]['adapters'][x]:
                del data[self.hostname]['adapters'][x]['udev']

        log.debugv('[GDRIVE]: -->update_files - data passed to function\n{}'.format(json.dumps(data, indent=4, sort_keys=True)))
        if not self.auth():
            return 'Gdrive-Error: Unable to Connect to Gdrive refer to cloud log for details'
        spreadsheet_id = self.file_id
        service = self.sheets_svc

        # init remote_consoles dict, any entries in config not matching this ConsolePis hostname are added as remote ConsolePis
        value_input_option = 'USER_ENTERED'
        remote_consoles = {}
        cnt = 1  # 1-based sheet row this host's data will be written to
        data[self.hostname]['upd_time'] = int(time.time())  # Put timestamp (epoch) on data for this ConsolePi
        for k in data:
            found = False  # True once an existing row for hostname k is located
            # each row is [hostname, json-serialized data] in columns A:B
            value_range_body = {
                "values": [
                    [
                        k,
                        json.dumps(data[k])
                    ]
                ]
            }

            # find out if this ConsolePi already has a row use that row in range
            request = service.spreadsheets().values().get(
                spreadsheetId=spreadsheet_id, range='A:B')
            result = self.exec_request(request)
            log.info('[GDRIVE]: Reading from Cloud Config')
            if result.get('values') is not None:
                x = 1  # sheet rows are 1-based
                for row in result.get('values'):
                    if k == row[0]:  # k is hostname row[0] is column A of current row
                        cnt = x
                        found = True
                    else:
                        # any other hostname's row is a remote ConsolePi
                        log.info('[GDRIVE]: {0} found in Google Drive Config'.format(row[0]))
                        remote_consoles[row[0]] = json.loads(row[1])
                        remote_consoles[row[0]]['source'] = 'cloud'
                    x += 1
                log.debugv(f'[GDRIVE]: {len(remote_consoles)} Remote ConsolePis Found on Gdrive: \n{json.dumps(remote_consoles)}')
            range_ = 'a' + str(cnt) + ':b' + str(cnt)

            # -- // Update gdrive with this ConsolePis data \\ --
            if not config.cloud_pull_only:
                if found:
                    # overwrite the existing row for this hostname
                    log.info('[GDRIVE]: Updating ' + str(k) + ' data found on row ' + str(cnt) + ' of Google Drive config')
                    request = service.spreadsheets().values().update(spreadsheetId=spreadsheet_id, range=range_,
                                                                     valueInputOption=value_input_option, body=value_range_body)
                else:
                    # no existing row -> append a new one
                    log.info('[GDRIVE]: Adding ' + str(k) + ' to Google Drive Config')
                    request = service.spreadsheets().values().append(spreadsheetId=spreadsheet_id, range=range_,
                                                                     valueInputOption=value_input_option, body=value_range_body)
                self.exec_request(request)
            else:
                log.info('cloud_pull_only override enabled not updating cloud with data from this host')
            cnt += 1
        self.resize_cols()
        return remote_consoles
Example #6
0
    def update_local_cloud_file(self,
                                remote_consoles=None,
                                current_remotes=None,
                                local_cloud_file=None):
        """Update local cloud cache (cloud.json).

        Verifies the newly discovered data is more current than what we already know and updates the local cloud.json file if so
        The Menu uses cloud.json to populate remote menu items

        params:
            remote_consoles: The newly discovered data (from Gdrive or mdns)
            current_remotes: The current remote data fetched from the local cloud cache (cloud.json)
                - func will retrieve this if not provided
            local_cloud_file: The path to the local cloud file (global var cloud.json)

        returns:
            dict: The resulting remote console dict representing the most recent data for each remote.
        """
        local_cloud_file = (config.static.get("LOCAL_CLOUD_FILE")
                            if local_cloud_file is None else local_cloud_file)

        # only fetch the existing cache if new data was actually passed in
        if len(remote_consoles) > 0:
            if current_remotes is None:
                current_remotes = self.data = config.remote_update(
                )  # grabs the remote data from local cloud cache

        # update current_remotes dict with data passed to function
        if len(remote_consoles) > 0:
            if current_remotes is not None:
                for _ in current_remotes:
                    if _ not in remote_consoles:
                        # cached remote absent from the new data: keep it unless
                        # it has already failed reachability twice (fail_cnt >= 2)
                        if ("fail_cnt" not in current_remotes[_]
                                or current_remotes[_]["fail_cnt"] < 2):
                            remote_consoles[_] = current_remotes[_]
                        elif (remote_consoles.get(_)
                              and "fail_cnt" not in remote_consoles[_]
                              and "fail_cnt" in current_remotes[_]):
                            remote_consoles[_]["fail_cnt"] = current_remotes[
                                _]["fail_cnt"]
                    else:

                        # -- VERBOSE DEBUG --
                        log.debugv(
                            "[CACHE UPD] \n--{}-- \n    remote upd_time: {}\n    remote rem_ip: {}\n    remote source: {}\n    cache rem upd_time: {}\n    cache rem_ip: {}\n    cache source: {}\n"
                            .format(  # NoQA
                                _,
                                time.strftime(
                                    "%a %x %I:%M:%S %p %Z",
                                    time.localtime(
                                        remote_consoles[_]["upd_time"]),
                                ) if "upd_time" in remote_consoles[_] else
                                None,  # NoQA
                                remote_consoles[_]["rem_ip"]
                                if "rem_ip" in remote_consoles[_] else None,
                                remote_consoles[_]["source"]
                                if "source" in remote_consoles[_] else None,
                                time.strftime(
                                    "%a %x %I:%M:%S %p %Z",
                                    time.localtime(
                                        current_remotes[_]["upd_time"]),
                                ) if "upd_time" in current_remotes[_] else
                                None,  # NoQA
                                current_remotes[_]["rem_ip"]
                                if "rem_ip" in current_remotes[_] else None,
                                current_remotes[_]["source"]
                                if "source" in current_remotes[_] else None,
                            ))
                        # -- END VERBOSE DEBUG --

                        # No Change Detected (data passed to function matches cache)
                        # last_ip is stripped so it doesn't defeat the equality check
                        if "last_ip" in current_remotes[_]:
                            del current_remotes[_]["last_ip"]
                        if remote_consoles[_] == current_remotes[_]:
                            log.debug(
                                "[CACHE UPD] {} No Change in info detected".
                                format(_))

                        # only factor in existing data if source is not mdns
                        elif ("upd_time" in remote_consoles[_]
                              or "upd_time" in current_remotes[_]):
                            if ("upd_time" in remote_consoles[_]
                                    and "upd_time" in current_remotes[_]):
                                # both sides timestamped: the newer one wins
                                if (current_remotes[_]["upd_time"] >
                                        remote_consoles[_]["upd_time"]):
                                    remote_consoles[_] = current_remotes[_]
                                    log.info(
                                        f"[CACHE UPD] {_} Keeping existing data from {current_remotes[_].get('source', '')} "
                                        "based on more current update time")
                                elif (remote_consoles[_]["upd_time"] >
                                      current_remotes[_]["upd_time"]):
                                    log.info(
                                        "[CACHE UPD] {} Updating data from {} "
                                        "based on more current update time".
                                        format(_,
                                               remote_consoles[_]["source"]))
                                else:  # -- Update Times are equal --
                                    # equal timestamps but differing adapter keys
                                    # or interfaces: warn, new data is kept
                                    if (current_remotes[_].get("adapters") and
                                            remote_consoles[_].get("adapters")
                                            and current_remotes[_]["adapters"].
                                            keys() != remote_consoles[_]
                                        ["adapters"].keys()
                                        ) or remote_consoles[_].get(
                                            "interfaces",
                                            {}) != current_remotes[_].get(
                                                "interfaces", {}):
                                        log.warning(
                                            "[CACHE UPD] {} current cache update time and {} update time are equal"
                                            " but data appears to have changed. Updating"
                                            .format(
                                                _,
                                                remote_consoles[_]["source"]))
                            elif "upd_time" in current_remotes[_]:
                                # only the cache is timestamped: prefer cache
                                remote_consoles[_] = current_remotes[_]
                                log.info(
                                    "[CACHE UPD] {} Keeping existing data based *existence* of update time "
                                    "which is lacking in this update from {}".
                                    format(_, remote_consoles[_]["source"]))

            # write merged data to cloud.json; on PermissionError fix perms and retry once
            for _try in range(0, 2):
                try:
                    with open(local_cloud_file, "w") as cloud_file:
                        cloud_file.write(
                            json.dumps(remote_consoles,
                                       indent=4,
                                       sort_keys=True))
                        utils.set_perm(
                            local_cloud_file
                        )  # a hack to deal with perms ~ consolepi-details del func
                        break
                except PermissionError:
                    utils.set_perm(local_cloud_file)

        else:
            log.warning(
                "[CACHE UPD] cache update called with no data passed, doing nothing"
            )

        return remote_consoles
Example #7
0
    def refresh(self, bypass_cloud=False):
        """Refresh local adapter/interface data and sync remotes via the cloud.

        params:
            bypass_cloud(bool): when True skip the cloud (Gdrive) sync and only
                refresh local data and the remote data from the local cache.

        No return - updates self.data (and self.cloud on first Gdrive use).
        """
        remote_consoles = None
        cpiexec = self.cpiexec
        local = self.local
        cloud_svc = config.cfg.get("cloud_svc", "error")

        # TODO refactor wait_for_threads to have an all key or accept a list
        # abort refresh if init/toggle threads are still running after wait
        with Halo(text="Waiting For threads to complete", spinner="dots1"):
            if cpiexec.wait_for_threads(thread_type="remotes") and (
                    config.power
                    and cpiexec.wait_for_threads(name="_toggle_refresh")):
                log.show(
                    "Timeout Waiting for init or toggle threads to complete try again later or"
                    " investigate logs")
                return

        # -- // Update/Refresh Local Data (Adapters/Interfaces) \\ --
        local.data = local.build_local_dict(refresh=True)
        log.debugv(
            f"Final Data set collected for {local.hostname}: {local.data}")

        # -- // Get details from Google Drive - once populated will skip \\ --
        if not bypass_cloud and self.do_cloud and not self.local_only:
            if cloud_svc == "gdrive" and self.cloud is None:
                # burried import until I find out why this import takes so @#%$@#% long.  Not imported until 1st refresh is called
                with Halo(text="Loading Google Drive Library",
                          spinner="dots1"):
                    from consolepi.gdrive import GoogleDrive
                self.cloud = GoogleDrive(hostname=local.hostname)
                log.info("[MENU REFRESH] Gdrive init")

            # Pass Local Data to update_sheet method get remotes found on sheet as return
            # update sheets function updates local_cloud_file
            _msg = "[MENU REFRESH] Updating to/from {}".format(cloud_svc)
            log.info(_msg)
            if stdin.isatty():
                self.spin.start(_msg)
            # -- // SYNC DATA WITH GDRIVE \\ --
            # returns dict of remotes on success, or 'Gdrive-Error: ...' string on failure
            remote_consoles = self.cloud.update_files(
                local.data)  # local data refreshed above
            if remote_consoles and "Gdrive-Error:" not in remote_consoles:
                if stdin.isatty():
                    self.spin.succeed(_msg +
                                      "\n\tFound {} Remotes via Gdrive Sync".
                                      format(len(remote_consoles)))
                    for r in remote_consoles:
                        # -- Convert Any Remotes with old API schema to new API schema --
                        if isinstance(remote_consoles[r].get("adapters", {}),
                                      list):
                            remote_consoles[r][
                                "adapters"] = self.convert_adapters(
                                    remote_consoles[r]["adapters"])
                            log.warning(
                                f"Adapter data for {r} retrieved from cloud in old API format... Converted"
                            )
            elif "Gdrive-Error:" in remote_consoles:
                if stdin.isatty():
                    self.spin.fail("{}\n\t{} {}".format(
                        _msg, self.log_sym_error, remote_consoles))
                log.show(remote_consoles
                         )  # display error returned from gdrive module
                remote_consoles = []
            else:
                if stdin.isatty():
                    self.spin.warn(_msg +
                                   "\n\tNo Remotes Found via Gdrive Sync")

            if len(remote_consoles) > 0:
                _msg = f"[MENU REFRESH] Updating Local Cache with data from {cloud_svc}"
                log.info(_msg)
                if stdin.isatty():
                    self.spin.start(_msg)
                self.update_local_cloud_file(remote_consoles)
                if stdin.isatty():
                    self.spin.succeed(_msg)  # no real error correction here
            else:
                log.warning(
                    f"[MENU REFRESH] No Remote ConsolePis found on {cloud_svc}",
                    show=True,
                )
        else:
            if self.do_cloud and not bypass_cloud:
                log.show(
                    f"Not Updating from {cloud_svc} due to connection failure\n"
                    "Close and re-launch menu if network access has been restored"
                )

        # Update Remote data with data from local_cloud cache / cloud
        self.data = self.get_remote(data=remote_consoles)
Example #8
0
    def on_service_state_change(self, zeroconf: Zeroconf, service_type: str,
                                name: str,
                                state_change: ServiceStateChange) -> None:
        """zeroconf callback: process a newly Added ConsolePi mdns service.

        Decodes the properties advertised by the remote, determines its
        reachable IP and adapter data (querying the remote's API when neither
        the mdns payload nor the local cache has adapter data), optionally
        prints discovery details (self.show), and updates the local cloud
        cache when warranted.

        params:
            zeroconf: the Zeroconf instance that fired the callback
            service_type: mdns service type string
            name: mdns service name
            state_change: only ServiceStateChange.Added is acted on
        """
        cpi = self.cpi
        mdns_data = None
        update_cache = False
        if state_change is ServiceStateChange.Added:
            info = zeroconf.get_service_info(service_type, name)
            if info:
                # ignore our own advertisement
                if info.server.split('.')[0] != cpi.local.hostname:
                    if info.properties:
                        properties = info.properties

                        try:
                            # decode property bytes; values that look like JSON
                            # ('[' or '{' prefix) are parsed, others kept as str
                            mdns_data = {
                                k.decode('UTF-8'):
                                v.decode('UTF-8') if len(v) == 0
                                or not v.decode('UTF-8')[0] in ['[', '{'] else
                                json.loads(v.decode('UTF-8'))  # NoQA
                                for k, v in properties.items()
                            }
                        except Exception as e:
                            log.exception(
                                f"[MDNS DSCVRY] {e.__class__.__name__} occured while parsing mdns_data:\n {mdns_data}\n"
                                f"Exception: \n{e}")
                            return

                        hostname = mdns_data.get('hostname')
                        interfaces = mdns_data.get('interfaces', [])
                        # interfaces = json.loads(properties[b'interfaces'].decode("utf-8"))

                        log_out = json.dumps(mdns_data,
                                             indent=4,
                                             sort_keys=True)
                        log.debug(
                            f'[MDNS DSCVRY] {hostname} Properties Discovered via mdns:\n{log_out}'
                        )

                        # resolve reachable IP: explicit rem_ip, else the single
                        # interface's ip, else whatever the cache already has
                        rem_ip = mdns_data.get('rem_ip')
                        if not rem_ip:
                            if len(mdns_data.get('interfaces', [])) == 1:
                                rem_ip = [
                                    interfaces[i]['ip'] for i in interfaces
                                ]
                                rem_ip = rem_ip[0]
                            else:
                                rem_ip = None if hostname not in cpi.remotes.data or 'rem_ip' not in cpi.remotes.data[hostname] \
                                    else cpi.remotes.data[hostname]['rem_ip']

                        cur_known_adapters = cpi.remotes.data.get(
                            hostname, {
                                'adapters': None
                            }).get('adapters')

                        # -- Log new entry only if this is the first time it's been discovered --
                        if hostname not in self.d_discovered:
                            self.d_discovered.append(hostname)
                            log.info(
                                '[MDNS DSCVRY] {}({}) Discovered via mdns'.
                                format(hostname,
                                       rem_ip if rem_ip is not None else '?'))

                        from_mdns_adapters = mdns_data.get('adapters')
                        mdns_data['rem_ip'] = rem_ip
                        mdns_data[
                            'adapters'] = from_mdns_adapters if from_mdns_adapters else cur_known_adapters
                        mdns_data['source'] = 'mdns'
                        mdns_data['upd_time'] = int(time.time())
                        mdns_data = {hostname: mdns_data}

                        # update from API only if no adapter data exists either in cache or from mdns that triggered this
                        # adapter data is updated on menu_launch either way
                        if (not mdns_data[hostname]['adapters'] and hostname not in self.no_adapters) or \
                                hostname not in cpi.remotes.data:
                            log.info(
                                f"[MDNS DSCVRY] {info.server.split('.')[0]} provided no adapter data Collecting via API"
                            )
                            # TODO check this don't think needed had a hung process on one of my Pis added it to be safe
                            try:
                                # TODO we are setting update time here so always result in a cache update with the restart timer
                                res = cpi.remotes.api_reachable(
                                    hostname, mdns_data[hostname])
                                update_cache = res.update
                                if not res.data.get('adapters'):
                                    self.no_adapters.append(hostname)
                                elif hostname in self.no_adapters:
                                    self.no_adapters.remove(hostname)
                                mdns_data[hostname] = res.data
                                # reachable = res.reachable
                            except Exception as e:
                                log.error(
                                    f'Exception occurred verifying reachability via API for {hostname}:\n{e}'
                                )

                        if self.show:
                            if hostname in self.discovered:
                                self.discovered.remove(hostname)
                            self.discovered.append('{}{}'.format(
                                hostname, '*' if update_cache else ''))
                            print(hostname +
                                  '({}) Discovered via mdns.'.format(
                                      rem_ip if rem_ip is not None else '?'))

                            # display adapters from each source; adapter lists in
                            # the old API format are list-of-dicts with a 'dev' key
                            try:
                                print('{}\n{}\n{}'.format(
                                    'mdns: None' if from_mdns_adapters is None
                                    else 'mdns: {}'.format([
                                        d.replace('/dev/', '')
                                        for d in from_mdns_adapters
                                    ] if not isinstance(
                                        from_mdns_adapters, list) else [
                                            d['dev'].replace('/dev/', '')
                                            for d in from_mdns_adapters
                                        ]),
                                    'api (mdns trigger): None'
                                    if not mdns_data[hostname]['adapters'] else
                                    'api (mdns trigger): {}'.format(  # NoQA
                                        [
                                            d.replace('/dev/', '') for d in
                                            mdns_data[hostname]['adapters']
                                        ] if not isinstance(
                                            mdns_data[hostname]['adapters'],
                                            list) else [
                                                d['dev'].replace('/dev/', '')
                                                for d in mdns_data[hostname]
                                                ['adapters']
                                            ]),
                                    'cache: None' if cur_known_adapters is None
                                    else 'cache: {}'.format([
                                        d.replace('/dev/', '')
                                        for d in cur_known_adapters
                                    ] if not isinstance(
                                        cur_known_adapters, list) else [
                                            d['dev'].replace('/dev/', '')
                                            for d in cur_known_adapters
                                        ])))
                            except TypeError as e:
                                print(f'EXCEPTION: {e}')

                            print(
                                f'\nDiscovered ConsolePis: {self.discovered}')
                            print("press Ctrl-C to exit...\n")

                        log.debugv(
                            '[MDNS DSCVRY] {} Final data set:\n{}'.format(
                                hostname,
                                json.dumps(mdns_data, indent=4,
                                           sort_keys=True)))

                        # TODO could probably just put the call to cache update in the api_reachable method
                        if update_cache:
                            if 'hostname' in mdns_data[hostname]:
                                del mdns_data[hostname]['hostname']
                            cpi.remotes.data = cpi.remotes.update_local_cloud_file(
                                remote_consoles=mdns_data)
                            log.info(
                                f'[MDNS DSCVRY] {hostname} Local Cache Updated after mdns discovery'
                            )
                    else:
                        log.warning(
                            f'[MDNS DSCVRY] {name}: No properties found'
                        )  # TODO Verify name is useful here
            else:
                # BUGFIX: info is falsy in this branch (previously logged
                # "{info}" which rendered as "None"); log the service name
                log.warning(f'[MDNS DSCVRY] {name}: No info found')
0
    def auto_pwron_thread(self, pwr_key):
        """Ensure any outlets linked to device are powered on

        Called by consolepi_menu exec_menu function and remote_launcher (for sessions to remotes)
        when a connection initiated with adapter.  Powers any linked outlets associated with the
        adapter on.

        params:
            pwr_key:str, The linked-outlet key (tty device / host) the user is connecting to.
        Returns:
            No Return - Updates class attributes (self.pwr.data outlet states, self.autopwr_wait)
        """
        # Bail out if the power-init threads are still running / failed.
        if self.wait_for_threads("init"):
            return

        outlets = self.pwr.data
        if "linked" not in outlets:
            _msg = "Error linked key not found in outlet dict\nUnable to perform auto power on"
            log.show(_msg, show=True)
            return

        # Nothing linked to this device - nothing to do.
        if not outlets["linked"].get(pwr_key):
            return

        # -- // Perform Auto Power On (if not already on) \\ --
        for o in outlets["linked"][pwr_key]:
            outlet = outlets["defined"].get(o.split(":")[0])
            if outlet:
                # linked entry may carry a port list after ':' e.g. "outlet1:[1, 2]"
                # (single-quoted in the cache, hence the quote swap before json.loads)
                ports = [] if ":" not in o else json.loads(
                    o.replace("'", '"').split(":")[1])
                _addr = outlet["address"]
            else:
                log.error(
                    f"Skipping Auto Power On {pwr_key} for {o}. Unable to pull outlet details from defined outlets.",
                    show=True,
                )
                log.debugv(f"Outlet Dict:\n{json.dumps(outlets)}")
                continue

            # -- // DLI web power switch Auto Power On \\ --
            #
            # TODO combine all ports from same pwr_key and sent to pwr_toggle once
            # TODO Update outlet if return is OK, then run refresh in the background to validate
            # TODO Add class attribute to cpi_menu ~ cpi_menu.new_data = "power", "main", etc
            #      Then in wait_for_input run loop to check for updates and re-display menu
            # TODO power_menu and dli_menu wait_for_threads auto power ... check cpiexec.autopwr_wait first
            #
            if outlet["type"].lower() == "dli":
                for p in ports:
                    log.debug(
                        f"[Auto PwrOn] Power ON {pwr_key} Linked Outlet {outlet['type']}:{_addr} p{p}"
                    )

                    if not outlet["is_on"][p][
                            "state"]:  # This is just checking what's in the dict not querying the DLI
                        r = self.pwr.pwr_toggle(outlet["type"],
                                                _addr,
                                                desired_state=True,
                                                port=p)
                        if isinstance(r, bool):
                            if r:
                                # refresh outlet state in the background so menu reflects reality
                                threading.Thread(
                                    target=self.outlet_update,
                                    kwargs={
                                        "refresh": True,
                                        "upd_linked": True
                                    },
                                    name="auto_pwr_refresh_dli",
                                ).start()
                                self.autopwr_wait = True
                        else:
                            log.warning(
                                f"{pwr_key} Error operating linked outlet @ {o}",
                                show=True,
                            )

            # -- // esphome Auto Power On \\ --
            elif outlet["type"].lower() == "esphome":
                for p in ports:
                    log.debug(
                        f"[Auto PwrOn] Power ON {pwr_key} Linked Outlet {outlet['type']}:{_addr} p{p}"
                    )
                    if not outlet["is_on"][p][
                            "state"]:  # This is just checking what's in the dict
                        r = self.pwr.pwr_toggle(outlet["type"],
                                                _addr,
                                                desired_state=True,
                                                port=p)
                        if isinstance(r, bool):
                            self.pwr.data["defined"][o.split(
                                ":")[0]]["is_on"][p]["state"] = r
                        else:
                            log.show(r)
                            log.warning(
                                f"{pwr_key} Error operating linked outlet @ {o}",
                                show=True,
                            )

            # -- // GPIO & TASMOTA Auto Power On \\ --
            else:
                log.debug(
                    f"[Auto PwrOn] Power ON {pwr_key} Linked Outlet {outlet['type']}:{_addr}"
                )
                r = self.pwr.pwr_toggle(
                    outlet["type"],
                    _addr,
                    desired_state=True,
                    noff=outlet.get("noff", True)
                    if outlet["type"].upper() == "GPIO" else True,
                )
                # An int return > 1 is an error code; normalize it to False and
                # fall through so the error is reported below rather than
                # silently discarded (previously this path skipped the log).
                if isinstance(r, int) and r > 1:
                    r = False

                if r:
                    self.pwr.data["defined"][o]["state"] = r
                    self.autopwr_wait = True
                else:
                    log.show(
                        f"Error operating linked outlet {o}:{outlet['address']}",
                        show=True,
                    )