def __init__(self, local, cpiexec):
    """Initialize remote-ConsolePi state and decide cloud vs local-only mode.

    Arguments:
        local -- local ConsolePi object (hostname/adapters for this system)
        cpiexec -- executor object used to run background tasks
    """
    # -- references to collaborating objects --
    self.local = local
    self.cpiexec = cpiexec

    # -- runtime state flags / collections --
    self.pop_list = []
    self.old_api_log_sent = False
    self.connected = False
    self.cache_update_pending = False
    self.log_sym_warn = log_sym.WARNING.value
    self.log_sym_error = log_sym.ERROR.value
    self.spin = Halo(spinner="dots")
    self.cloud = None  # Set in refresh method if reachable
    self.do_cloud = config.cfg.get("cloud", False)

    creds_file = config.static.get("CLOUD_CREDS_FILE")
    if not creds_file:
        self.no_creds_error()

    # NOTE(review): self.local_only is only assigned on the gdrive path below;
    # presumably other code paths set it elsewhere — confirm.
    if self.do_cloud and config.cloud_svc == "gdrive":
        if not utils.is_reachable("www.googleapis.com", 443):
            # cloud enabled but API unreachable -> degrade to local-only mode
            log.warning(
                f"failed to connect to {config.cloud_svc} - operating in local only mode",
                show=True,
            )
            self.local_only = True
        else:
            self.local_only = False
            if not utils.valid_file(creds_file):
                self.no_creds_error()

    # re-get cloud.json to capture any updates via mdns
    self.data = self.get_remote(data=config.remote_update())
def wait_for_boot():
    """Block until the remote host answers on the expected port.

    Polls reachability every 3 seconds; a Ctrl-C (KeyboardInterrupt)
    aborts the wait, clears the auto-power wait flag, and returns.
    """
    try:
        # poll until the host/port (closure vars _h/_p) responds
        while not utils.is_reachable(_h, _p, silent=True):
            time.sleep(3)
    except KeyboardInterrupt:
        self.autopwr_wait = False
        log.show("Connection Aborted")
def auth(self):
    """Authenticate to Google APIs, lazily building cached service objects.

    Returns:
        bool -- True when credentials, the sheets service, and a file id
        are all available; False on unreachable API or connection errors.
    """
    # guard clause: nothing to do if the API endpoint is unreachable
    if not utils.is_reachable('www.googleapis.com', 443):
        log.error('Google Drive is not reachable - Aborting')
        return False

    try:
        # each attribute is built once and cached on the instance
        if self.creds is None:
            self.creds = self.get_credentials()
        if self.sheets_svc is None:
            self.sheets_svc = discovery.build(
                'sheets', 'v4', credentials=self.creds, cache_discovery=False)
        if self.file_id is None:
            self.file_id = self.get_file_id()
            # no existing sheet found -> create one
            if self.file_id is None:
                self.file_id = self.create_sheet()
        return True
    except (ConnectionError, TimeoutError, OSError) as e:
        log.error('Exception Occurred Connecting to Gdrive {}'.format(e))
        return False
def gen_copy_key(self, rem_data=None):
    """Generate public ssh key and distribute to remote ConsolePis

    Keyword Arguments:
        rem_data {tuple or list of tuples} -- each tuple should have 3 items
        0: hostname of remote, 1: rem_ip, 2: rem_user (default: {None})

    Returns:
        {list} -- list of any errors reported, could be informational
    """
    # FIX: original crashed with TypeError when called with the default
    # rem_data=None (it was wrapped as [None] then tuple-unpacked).
    if rem_data is None:
        return ["gen_copy_key: no remote data provided... Skipped"]

    hostname = self.local.hostname
    loc_user = self.local.user
    loc_home = self.local.loc_home

    # -- generate local key file if it doesn't exist
    key_file = os.path.join(loc_home, ".ssh", "id_rsa")
    if not os.path.isfile(key_file):
        print("\nNo Local ssh cert found, generating...\n")
        utils.do_shell_cmd(
            f'sudo -u {loc_user} ssh-keygen -m pem -t rsa -C "{loc_user}@{hostname}"',
            timeout=360,
        )

    # -- copy keys to remote(s)
    if not isinstance(rem_data, list):
        rem_data = [rem_data]
    return_list = []
    for _rem in rem_data:
        rem, rem_ip, rem_user = _rem
        print(
            self.menu.format_line(
                "{{magenta}}Attempting to copy ssh cert to " + rem + "{{norm}}").text)
        # skip unreachable remotes rather than letting ssh-copy-id hang
        if not utils.is_reachable(rem_ip, 22, timeout=5, silent=True):
            return_list.append(f"{rem}: is not reachable... Skipped")
        else:
            ret = utils.do_shell_cmd(
                f"sudo -u {loc_user} ssh-copy-id {rem_user}@{rem_ip}",
                timeout=60  # 360
            )
            # do_shell_cmd returns None on success; anything else is an error msg
            if ret is not None:
                return_list.append("{}: {}".format(rem, ret))
    return return_list
def main():
    """Entry point for the IP-change cloud-update trigger.

    Pushes the local adapter data to the configured cloud service and
    merges any remote ConsolePi data back into the local cache.
    Exits with status 1 when the cloud API or credentials are unusable.
    """
    cpi = ConsolePi()
    cloud_svc = config.cfg.get("cloud_svc", "error")
    local = cpi.local
    remotes = cpi.remotes
    cpiexec = cpi.cpiexec
    log.info('[CLOUD TRIGGER (IP)]: Cloud Update triggered by IP Update')
    creds_file = config.static.get(
        "CLOUD_CREDS_FILE",
        '/etc/ConsolePi/cloud/gdrive/.credentials/credentials.json')

    # guard clauses: bail out before touching the cloud service
    if not utils.is_reachable("www.googleapis.com", 443):
        log.error(f"Not Updating {cloud_svc} due to connection failure")
        sys.exit(1)
    if not utils.valid_file(creds_file):
        log.error('Credentials file not found or invalid')
        sys.exit(1)

    # -- // Get details from Google Drive - once populated will skip \\ --
    if cloud_svc == "gdrive" and remotes.cloud is None:
        remotes.cloud = GoogleDrive(hostname=local.hostname)

    # A and (B and C) short-circuits identically to A and B and C
    if (cpiexec.wait_for_threads(thread_type="remotes")
            and config.power
            and cpiexec.wait_for_threads(name="_toggle_refresh")):
        log.error(
            'IP Change Cloud Update Trigger: TimeOut Waiting for Threads to Complete'
        )

    remote_consoles = remotes.cloud.update_files(local.data)
    if remote_consoles and "Gdrive-Error:" in remote_consoles:
        log.error(remote_consoles)
    else:
        # -- Convert Any Remotes with old API schema to new API schema --
        for rem_host, rem_dict in remote_consoles.items():
            if isinstance(rem_dict.get("adapters", {}), list):
                rem_dict["adapters"] = remotes.convert_adapters(
                    rem_dict["adapters"])
                log.warning(
                    f"Adapter data for {rem_host} retrieved from cloud in old API format... Converted"
                )
        if remote_consoles:
            remotes.update_local_cloud_file(remote_consoles)
headers = { 'User-Agent': 'ConsolePi/version', 'Accept': '*/*', 'Cache-Control': 'no-cache', 'Host': '{}:5000'.format(ip), 'accept-encoding': 'gzip, deflate', 'Connection': 'keep-alive', 'cache-control': 'no-cache' } try: response = requests.request("GET", url, headers=headers) log.info('[DHCP TRIGGER] Response from {}[{}]: \n{}'.format( ip, response.status_code, response.text)) except Exception: pass # TODO get/post info from/to ConsolePi that just connected via API # Update local cloud cache # oobm Discovery & ZTP trigger elif vendor is not None and iface is not None and iface == 'eth0': for _ in match: if _ in vendor: if utils.is_reachable(ip, 22): log.info('{} is reachable via ssh @ {}'.format(_, ip)) elif utils.is_reachable(ip, 23): log.info('{} is reachable via telnet @ {}'.format(_, ip)) # TODO add connection to reachable ssh/telnet on eth if oobm is enabled in config # TODO add option to ztp via jinja2 templates (longer term goal)