def Run(self, args):
  """Open an SSH session to Cloud Shell, refreshing credentials meanwhile.

  Args:
    args: argparse.Namespace, arguments the command was invoked with.
  """
  command_list = args.command.split(' ') if args.command else ['bash -l']
  project = properties.VALUES.core.project.Get()
  connection_info = util.PrepareEnvironment(args)
  command = ssh.SSHCommand(
      remote=ssh.Remote(host=connection_info.host, user=connection_info.user),
      port=six.text_type(connection_info.port),
      identity_file=connection_info.key,
      # Export the active project into the remote session, if one is set.
      remote_command=(['DEVSHELL_PROJECT_ID=' + project] if project else []) +
      command_list,
      extra_flags=args.ssh_flag,
      # Allocate a tty only for an interactive login shell.
      tty=not args.command,
      options={'StrictHostKeyChecking': 'no'},
  )
  if args.dry_run:
    # Quote each element so the printed command is copy-pasteable even when
    # an argument contains spaces (matches the other dry-run code paths in
    # this file).
    elems = command.Build(connection_info.ssh_env)
    log.Print(' '.join([six.moves.shlex_quote(elem) for elem in elems]))
  else:
    # Keep credentials fresh in the background for the lifetime of the SSH
    # session; a daemon thread will not block process exit.
    self.done = threading.Event()
    thread = threading.Thread(target=self.Reauthorize, args=())
    thread.daemon = True
    thread.start()
    command.Run(connection_info.ssh_env)
    self.done.set()
def PreliminarilyVerifyInstance(self, instance_id, remote, identity_file,
                                options):
  """Check that the reached host really is the expected compute instance.

  Connects over SSH and compares the instance id reported by the metadata
  server against the id we were given.

  Args:
    instance_id: str, id of the compute instance.
    remote: ssh.Remote, remote to connect to.
    identity_file: str, optional key file.
    options: dict, optional ssh options.

  Raises:
    ssh.CommandError: The ssh command failed.
    core_exceptions.NetworkIssueError: The instance id does not match.
  """
  metadata_id_url = (
      'http://metadata.google.internal/computeMetadata/v1/instance/id')
  # Exit codes 255 and 1 are taken by OpenSSH and PuTTY.
  # 23 chosen by fair dice roll.
  check_command = [
      '[ `curl "{}" -H "Metadata-Flavor: Google" -q` = {} ] || exit 23'.format(
          metadata_id_url, instance_id)
  ]
  verify_cmd = ssh.SSHCommand(remote, identity_file=identity_file,
                              options=options, remote_command=check_command)
  status = verify_cmd.Run(self.env, force_connect=True)  # pytype: disable=attribute-error
  if status == 23:
    # Connection worked, but the metadata server reported a different id.
    raise core_exceptions.NetworkIssueError(
        'Established connection with host {} but was unable to '
        'confirm ID of the instance.'.format(remote.host))
  if status != 0:
    raise ssh.CommandError(verify_cmd, return_code=status)
def Run(self, args):
  """SSH into a running App Engine flexible environment instance.

  Args:
    args: argparse.Namespace, the args the command was invoked with.

  Raises:
    InvalidInstanceTypeError: The instance is not supported for SSH.
    MissingVersionError: The version specified does not exist.
    MissingInstanceError: The instance specified does not exist.
    UnattendedPromptError: Not running in a tty.
    OperationCancelledError: User cancelled the operation.
    ssh.CommandError: The SSH command exited with SSH exit code, which
      usually implies that a connection problem occurred.

  Returns:
    int, The exit code of the SSH command.
  """
  client = appengine_api_client.GetApiClientForTrack(self.ReleaseTrack())
  environment = ssh.Environment.Current()
  environment.RequireSSH()
  ssh_keys = ssh.Keys.FromFilename()
  ssh_keys.EnsureKeysExist(overwrite=False)
  # Push our public key to the instance so the connection can authenticate.
  details = ssh_common.PopulatePublicKey(client, args.service, args.version,
                                         args.instance,
                                         ssh_keys.GetPublicKey())
  ssh_cmd = ssh.SSHCommand(
      details.remote,
      identity_file=ssh_keys.key_file,
      tty=containers.GetTty(args.container, args.command),
      remote_command=containers.GetRemoteCommand(args.container, args.command),
      options=details.options)
  return ssh_cmd.Run(environment)
def RunSSHCommandToInstance(command_list,
                            instance,
                            user,
                            args,
                            ssh_helper,
                            explicit_output_file=None,
                            explicit_error_file=None,
                            dry_run=False):
  """Runs a SSH command to a Google Compute Engine VM.

  Args:
    command_list: List with the ssh command to run.
    instance: The GCE VM object.
    user: The user to be used for the SSH command.
    args: The args used to call the gcloud instance.
    ssh_helper: ssh_utils.BaseSSHCLIHelper instance initialized for the
      command.
    explicit_output_file: Use this file for piping stdout of the SSH command,
      instead of using stdout. This is useful for capturing the command and
      analyzing it.
    explicit_error_file: Use this file for piping stderr of the SSH command,
      instead of using stderr. This is useful for capturing the command and
      analyzing it.
    dry_run: Whether or not this is a dry-run (only print, not run).

  Returns:
    The exit code of the SSH command

  Raises:
    ssh.CommandError: there was an error running a SSH command
  """
  external_ip_address = ssh_utils.GetExternalIPAddress(instance)
  remote = ssh.Remote(external_ip_address, user)

  identity_file = None
  options = None
  if not args.plain:
    # Use the gcloud-managed key and per-host options unless the caller asked
    # for a plain ssh invocation.
    identity_file = ssh_helper.keys.key_file
    options = ssh_helper.GetConfig(ssh_utils.HostKeyAlias(instance),
                                   args.strict_host_key_checking)
  extra_flags = ssh.ParseAndSubstituteSSHFlags(args, remote, user)
  remainder = []

  remote_command = containers.GetRemoteCommand(None, command_list)
  cmd = ssh.SSHCommand(remote, identity_file=identity_file,
                       options=options, extra_flags=extra_flags,
                       remote_command=remote_command, remainder=remainder)
  if dry_run:
    DryRunLog(' '.join(cmd.Build(ssh_helper.env)))
    return 0
  return_code = cmd.Run(ssh_helper.env,
                        force_connect=True,
                        explicit_output_file=explicit_output_file,
                        explicit_error_file=explicit_error_file)
  # Make sure the remote command's captured output is emitted before we
  # return.
  log.out.flush()
  return return_code
def Run(self, args):
  """Build the SSH command from the prepared environment and run it.

  With --dry-run the fully built command line is printed instead of run.

  Args:
    args: argparse.Namespace, arguments the command was invoked with.
  """
  words = args.command.split(' ') if args.command else None
  info = util.PrepareEnvironment(args)
  ssh_args = {
      'remote': ssh.Remote(host=info.host, user=info.user),
      'port': str(info.port),
      'identity_file': info.key,
      'remote_command': containers.GetRemoteCommand(None, words),
      'extra_flags': args.ssh_flag,
      'tty': containers.GetTty(None, args.command),
      'options': {'StrictHostKeyChecking': 'no'},
  }
  command = ssh.SSHCommand(**ssh_args)
  if args.dry_run:
    log.Print(' '.join(command.Build(info.ssh_env)))
  else:
    command.Run(info.ssh_env)
def SSHToInstance(self, args, instance):
  """Helper to manage authentication followed by SSH to the instance."""
  args = self._DefaultArgsForSSH(args)

  external_nat = ssh_utils.GetExternalIPAddress(instance)
  log.status.Print(
      'Trying to SSH to VM with NAT IP:{}'.format(external_nat))
  remote = ssh.Remote(external_nat, ssh.GetDefaultSshUsername())
  args.ssh_key_file = ssh.Keys.DEFAULT_KEY_FILE

  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)
  identity_file = ssh_helper.keys.key_file

  user, _ = ssh_utils.GetUserAndInstance(args.name)
  host_keys = self._GetHostKeyFromInstance(args.zone, ssh_helper, instance)
  options = self._GetSSHOptions(args.name, ssh_helper, instance, host_keys)

  # A freshly added SSH key takes a moment to become usable on the VM;
  # block until it has propagated before attempting the real connection.
  self._WaitForSSHKeysToPropagate(ssh_helper, remote, identity_file, user,
                                  instance, options)

  extra_flags = []
  # Ctpu seems to be forwarding some other ports on what
  # seems like the TPU node. Need to understand better before enabling.
  if args.forward_ports:
    # Agent forwarding plus TensorBoard (6006) and Jupyter (8888) tunnels —
    # presumably; port meanings not confirmed here.
    extra_flags.extend(
        ['-A', '-L', '6006:localhost:6006', '-L', '8888:localhost:8888'])
  ssh_cmd_args = {
      'remote': remote,
      'identity_file': identity_file,
      'options': options,
      'extra_flags': extra_flags
  }

  cmd = ssh.SSHCommand(**ssh_cmd_args)
  # Errors from SSH itself result in an ssh.CommandError being raised
  return_code = cmd.Run(
      ssh_helper.env,
      force_connect=properties.VALUES.ssh.putty_force_connect.GetBool())
  if return_code:
    # This is the return code of the remote command. Problems with SSH itself
    # will result in ssh.CommandError being raised above.
    sys.exit(return_code)
def Run(self, args):
  """Run the given command — or a login shell — over SSH.

  Args:
    args: argparse.Namespace, arguments the command was invoked with.
  """
  if args.command:
    words = args.command.split(' ')
  else:
    words = ['bash -l']
  project = properties.VALUES.core.project.Get()
  info = util.PrepareEnvironment(args)
  # Expose the active project to the remote session when one is configured.
  env_prefix = ['DEVSHELL_PROJECT_ID=' + project] if project else []
  command = ssh.SSHCommand(
      remote=ssh.Remote(host=info.host, user=info.user),
      port=str(info.port),
      identity_file=info.key,
      remote_command=env_prefix + words,
      extra_flags=args.ssh_flag,
      # Interactive login shells get a tty; explicit commands do not.
      tty=not args.command,
      options={'StrictHostKeyChecking': 'no'},
  )
  if args.dry_run:
    log.Print(' '.join(command.Build(info.ssh_env)))
  else:
    command.Run(info.ssh_env)
def PreliminarilyVerifyInstance(self, instance_id, remote, identity_file,
                                options):
  """Verify the instance's identity by connecting and running a command.

  Args:
    instance_id: str, id of the compute instance.
    remote: ssh.Remote, remote to connect to.
    identity_file: str, optional key file.
    options: dict, optional ssh options.

  Raises:
    ssh.CommandError: The ssh command failed.
    core_exceptions.NetworkIssueError: The instance id does not match.
  """
  # Verification can be opted out of via a property; warn loudly because we
  # may then be talking to the wrong host.
  if not properties.VALUES.ssh.verify_internal_ip.GetBool():
    log.warning(
        'Skipping internal IP verification connection and connecting to [{}] '
        'in the current subnet. This may be the wrong host if the instance '
        'is in a different subnet!'.format(remote.host))
    return
  metadata_id_url = (
      'http://metadata.google.internal/computeMetadata/v1/instance/id')
  # Exit codes 255 and 1 are taken by OpenSSH and PuTTY.
  # 23 chosen by fair dice roll.
  remote_command = [
      '[ `curl "{}" -H "Metadata-Flavor: Google" -q` = {} ] || exit 23'
      .format(metadata_id_url, instance_id)]
  cmd = ssh.SSHCommand(remote, identity_file=identity_file,
                       options=options, remote_command=remote_command)
  return_code = cmd.Run(
      self.env,
      force_connect=properties.VALUES.ssh.putty_force_connect.GetBool())
  if return_code == 0:
    # Metadata-server id matched the expected instance id.
    return
  elif return_code == 23:
    raise core_exceptions.NetworkIssueError(
        'Established connection with host {} but was unable to '
        'confirm ID of the instance.'.format(remote.host))
  raise ssh.CommandError(cmd, return_code=return_code)
def Run(self, args):
  """SSH into Cloud Shell, optionally keeping the session authorized.

  Args:
    args: argparse.Namespace, arguments the command was invoked with.
  """
  if not args.authorize_session:
    log.Print(
        'Automatic authentication with GCP CLI tools in Cloud Shell is '
        'disabled. To enable, please rerun command with '
        '`--authorize-session` flag.')
  words = args.command.split(' ') if args.command else ['bash -l']
  project = properties.VALUES.core.project.Get()
  info = util.PrepareEnvironment(args)
  if args.authorize_session:
    util.AuthorizeEnvironment()
  # Expose the active project to the remote session when one is configured.
  prefix = ['DEVSHELL_PROJECT_ID=' + project] if project else []
  command = ssh.SSHCommand(
      remote=ssh.Remote(host=info.host, user=info.user),
      port=six.text_type(info.port),
      identity_file=info.key,
      remote_command=prefix + words,
      extra_flags=args.ssh_flag,
      tty=not args.command,
      options={'StrictHostKeyChecking': 'no'},
  )
  if args.dry_run:
    # Print a copy-pasteable, shell-quoted version of the command.
    log.Print(' '.join(
        six.moves.shlex_quote(piece)
        for piece in command.Build(info.ssh_env)))
  elif args.authorize_session:
    # Refresh credentials in the background while the session is open; the
    # daemon thread will not block process exit.
    self.done = threading.Event()
    thread = threading.Thread(target=self.Reauthorize, args=())
    thread.daemon = True
    thread.start()
    command.Run(info.ssh_env)
    self.done.set()
  else:
    command.Run(info.ssh_env)
def Run(self, args):
  """See ssh_utils.BaseSSHCLICommand.Run.

  Resolves the target (on-prem host or GCE instance), handles OS Login /
  key propagation / IAP tunneling, then runs (or prints) the SSH command.
  """
  # On-prem targets are addressed by network+region and always use plain ssh.
  on_prem = (
      args.IsKnownAndSpecified('network') and
      args.IsKnownAndSpecified('region'))
  if on_prem:
    args.plain = True

  # These two lines are needed to ensure reauth is performed as needed, even
  # for on-prem, which doesn't use the resulting variables.
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  client = holder.client

  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)

  oslogin_state = ssh.OsloginState()

  if on_prem:
    user, ip = ssh_utils.GetUserAndInstance(args.user_host)
    remote = ssh.Remote(ip, user)

    iap_tunnel_args = iap_tunnel.CreateOnPremSshTunnelArgs(
        args, self.ReleaseTrack(), ip)
    instance_address = ip
    internal_address = ip
  else:
    user, instance_name = ssh_utils.GetUserAndInstance(args.user_host)
    instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(
        [instance_name], compute_scope.ScopeEnum.ZONE, args.zone,
        holder.resources,
        scope_lister=instance_flags.GetInstanceZoneScopeLister(client))[0]
    instance = ssh_helper.GetInstance(client, instance_ref)
    project = ssh_helper.GetProject(client, instance_ref.project)
    host_keys = ssh_helper.GetHostKeysFromGuestAttributes(
        client, instance_ref, instance, project)
    iap_tunnel_args = iap_tunnel.CreateSshTunnelArgs(
        args, self.ReleaseTrack(), instance_ref,
        ssh_utils.GetExternalInterface(instance, no_raise=True))
    internal_address = ssh_utils.GetInternalIPAddress(instance)

    if args.troubleshoot:
      # Diagnose connectivity instead of connecting, then bail out.
      log.status.Print(TROUBLESHOOT_HEADER.format(
          instance_ref, args.zone or instance_ref.zone,
          datetime.datetime.now()))
      RunTroubleshooting(project, args.zone or instance_ref.zone,
                         instance, iap_tunnel_args)
      return

    if not host_keys and host_keys is not None:
      log.debug('Unable to retrieve host keys from instance metadata. '
                'Continuing.')
  expiration, expiration_micros = ssh_utils.GetSSHKeyExpirationFromArgs(args)

  if args.plain:
    oslogin_state.oslogin_enabled = False
  else:
    public_key = ssh_helper.keys.GetPublicKey().ToEntry(include_comment=True)
    # If there is an '@' symbol in the user_host arg, the user is requesting
    # to connect as a specific user. This may get overridden by OS Login.
    username_requested = '@' in args.user_host
    oslogin_state = ssh.GetOsloginState(
        instance, project, user, public_key, expiration_micros,
        self.ReleaseTrack(), username_requested=username_requested)
    user = oslogin_state.user
    log.debug(oslogin_state)

  if iap_tunnel_args:
    # IAP Tunnel only uses instance_address for the purpose of --ssh-flag
    # substitution. In this case, dest_addr doesn't do much, it just matches
    # against entries in the user's ssh_config file. It's best to use
    # something unique to avoid false positive matches, thus we use
    # HostKeyAlias.
    instance_address = internal_address
    dest_addr = ssh_utils.HostKeyAlias(instance)
  elif args.internal_ip:
    instance_address = internal_address
    dest_addr = instance_address
  else:
    instance_address = ssh_utils.GetExternalIPAddress(instance)
    dest_addr = instance_address
  remote = ssh.Remote(dest_addr, user)

  # identity_file_list will be None if security keys are not enabled.
  identity_file_list = ssh.WriteSecurityKeys(oslogin_state)
  identity_file = None
  options = None
  if not args.plain:
    if not identity_file_list:
      identity_file = ssh_helper.keys.key_file
    options = ssh_helper.GetConfig(ssh_utils.HostKeyAlias(instance),
                                   args.strict_host_key_checking,
                                   host_keys_to_add=host_keys)

  extra_flags = ssh.ParseAndSubstituteSSHFlags(args, remote, instance_address,
                                               internal_address)
  remainder = []

  if args.ssh_args:
    remainder.extend(args.ssh_args)

  # Transform args.command into arg list or None if no command
  command_list = args.command.split(' ') if args.command else None
  tty = containers.GetTty(args.container, command_list)
  remote_command = containers.GetRemoteCommand(args.container, command_list)

  # Do not include default port since that will prevent users from
  # specifying a custom port (b/121998342).
  ssh_cmd_args = {'remote': remote,
                  'identity_file': identity_file,
                  'options': options,
                  'extra_flags': extra_flags,
                  'remote_command': remote_command,
                  'tty': tty,
                  'iap_tunnel_args': iap_tunnel_args,
                  'remainder': remainder,
                  'identity_list': identity_file_list}

  cmd = ssh.SSHCommand(**ssh_cmd_args)

  if args.dry_run:
    # Add quotes around any arguments that contain spaces.
    log.out.Print(' '.join('"{0}"'.format(arg) if ' ' in arg else arg
                           for arg in cmd.Build(ssh_helper.env)))
    return

  # Raise errors if instance requires a security key but the local
  # envionment doesn't support them. This is after the 'dry-run' because
  # we want to allow printing the command regardless.
  if self.enable_security_keys:
    ssh_utils.ConfirmSecurityKeyStatus(oslogin_state)

  # OS Login (and plain mode) manage keys externally; otherwise ensure our
  # key is in project/instance metadata.
  if args.plain or oslogin_state.oslogin_enabled:
    keys_newly_added = False
  else:
    keys_newly_added = ssh_helper.EnsureSSHKeyExists(
        client, remote.user, instance, project, expiration=expiration)

  if keys_newly_added:
    poller = ssh_utils.CreateSSHPoller(remote, identity_file, options,
                                       iap_tunnel_args,
                                       extra_flags=extra_flags)
    log.status.Print('Waiting for SSH key to propagate.')
    # TODO(b/35355795): Don't force_connect
    try:
      poller.Poll(
          ssh_helper.env,
          force_connect=properties.VALUES.ssh.putty_force_connect.GetBool())
    except retry.WaitException:
      raise ssh_utils.NetworkError()

  if args.internal_ip and not on_prem:
    ssh_helper.PreliminarilyVerifyInstance(instance.id, remote, identity_file,
                                           options)

  # Errors from SSH itself result in an ssh.CommandError being raised
  try:
    return_code = cmd.Run(
        ssh_helper.env,
        force_connect=properties.VALUES.ssh.putty_force_connect.GetBool())
  except ssh.CommandError as e:
    # Surface troubleshooting guidance for GCE targets before re-raising.
    if not on_prem:
      log.status.Print(self.createRecommendMessage(args, instance_name,
                                                   instance_ref, project))
    raise e

  if return_code:
    # This is the return code of the remote command. Problems with SSH itself
    # will result in ssh.CommandError being raised above.
    sys.exit(return_code)
def Run(self, args):
  """See ssh_utils.BaseSSHCLICommand.Run.

  Resolves the instance, determines OS Login usage and addressing, ensures
  key propagation, then runs (or prints) the SSH command.
  """
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  client = holder.client

  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)
  user, instance_name = ssh_utils.GetUserAndInstance(args.user_host)
  instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(
      [instance_name], compute_scope.ScopeEnum.ZONE, args.zone,
      holder.resources,
      scope_lister=instance_flags.GetInstanceZoneScopeLister(client))[0]
  instance = ssh_helper.GetInstance(client, instance_ref)
  project = ssh_helper.GetProject(client, instance_ref.project)
  if self.get_host_keys:
    # Best effort: missing host keys just means no strict pinning later.
    host_keys = ssh_helper.GetHostKeysFromGuestAttributes(
        client, instance_ref)
    if not host_keys:
      log.warning('Unable to retrieve host keys from instance metadata. '
                  'Continuing.')
  else:
    host_keys = {}

  expiration, expiration_micros = ssh_utils.GetSSHKeyExpirationFromArgs(args)
  if args.plain:
    use_oslogin = False
  else:
    public_key = ssh_helper.keys.GetPublicKey().ToEntry(include_comment=True)
    # OS Login may substitute its own username for the requested one.
    user, use_oslogin = ssh.CheckForOsloginAndGetUser(
        instance, project, user, public_key, expiration_micros,
        self.ReleaseTrack())

  iap_tunnel_args = iap_tunnel.SshTunnelArgs.FromArgs(
      args, self.ReleaseTrack(), instance_ref,
      ssh_utils.GetExternalInterface(instance, no_raise=True))

  internal_address = ssh_utils.GetInternalIPAddress(instance)

  if iap_tunnel_args:
    # IAP Tunnel only uses instance_address for the purpose of --ssh-flag
    # substitution. In this case, dest_addr doesn't do much, it just matches
    # against entries in the user's ssh_config file. It's best to use
    # something unique to avoid false positive matches, thus we use
    # HostKeyAlias.
    instance_address = internal_address
    dest_addr = ssh_utils.HostKeyAlias(instance)
  elif args.internal_ip:
    instance_address = internal_address
    dest_addr = instance_address
  else:
    instance_address = ssh_utils.GetExternalIPAddress(instance)
    dest_addr = instance_address
  remote = ssh.Remote(dest_addr, user)

  identity_file = None
  options = None
  if not args.plain:
    identity_file = ssh_helper.keys.key_file
    options = ssh_helper.GetConfig(ssh_utils.HostKeyAlias(instance),
                                   args.strict_host_key_checking,
                                   host_keys_to_add=host_keys)

  extra_flags = ssh.ParseAndSubstituteSSHFlags(args, remote, instance_address,
                                               internal_address)
  remainder = []

  if args.ssh_args:
    remainder.extend(args.ssh_args)

  # Transform args.command into arg list or None if no command
  command_list = args.command.split(' ') if args.command else None
  tty = containers.GetTty(args.container, command_list)
  remote_command = containers.GetRemoteCommand(args.container, command_list)

  # Do not include default port since that will prevent users from
  # specifying a custom port (b/121998342).
  ssh_cmd_args = {'remote': remote,
                  'identity_file': identity_file,
                  'options': options,
                  'extra_flags': extra_flags,
                  'remote_command': remote_command,
                  'tty': tty,
                  'iap_tunnel_args': iap_tunnel_args,
                  'remainder': remainder}

  cmd = ssh.SSHCommand(**ssh_cmd_args)

  if args.dry_run:
    log.out.Print(' '.join(cmd.Build(ssh_helper.env)))
    return

  # OS Login (and plain mode) manage keys externally; otherwise ensure our
  # key is present in metadata.
  if args.plain or use_oslogin:
    keys_newly_added = False
  else:
    keys_newly_added = ssh_helper.EnsureSSHKeyExists(
        client, remote.user, instance, project, expiration=expiration)

  if keys_newly_added:
    poller = ssh_utils.CreateSSHPoller(remote, identity_file, options,
                                       iap_tunnel_args,
                                       extra_flags=extra_flags)
    log.status.Print('Waiting for SSH key to propagate.')
    # TODO(b/35355795): Don't force_connect
    try:
      poller.Poll(ssh_helper.env, force_connect=True)
    except retry.WaitException:
      raise ssh_utils.NetworkError()

  if args.internal_ip:
    ssh_helper.PreliminarilyVerifyInstance(instance.id, remote, identity_file,
                                           options)

  # Errors from SSH itself result in an ssh.CommandError being raised
  return_code = cmd.Run(ssh_helper.env, force_connect=True)
  if return_code:
    # This is the return code of the remote command. Problems with SSH itself
    # will result in ssh.CommandError being raised above.
    sys.exit(return_code)
def Run(self, args):
  """See ssh_utils.BaseSSHCLICommand.Run.

  Supports tunneling through IAP: when requested, a local listener is
  started and ssh targets localhost on the tunnel's local port.
  """
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  client = holder.client

  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)
  user, instance_name = ssh_utils.GetUserAndInstance(args.user_host)
  instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(
      [instance_name], compute_scope.ScopeEnum.ZONE, args.zone,
      holder.resources,
      scope_lister=instance_flags.GetInstanceZoneScopeLister(client))[0]
  instance = ssh_helper.GetInstance(client, instance_ref)
  project = ssh_helper.GetProject(client, instance_ref.project)
  if args.plain:
    use_oslogin = False
  else:
    public_key = ssh_helper.keys.GetPublicKey().ToEntry(include_comment=True)
    # OS Login may substitute its own username for the requested one.
    user, use_oslogin = ssh.CheckForOsloginAndGetUser(
        instance, project, user, public_key, self.ReleaseTrack())

  if args.internal_ip:
    ip_address = ssh_utils.GetInternalIPAddress(instance)
  else:
    ip_address = ssh_utils.GetExternalIPAddress(instance)

  remote = ssh.Remote(ip_address, user)

  identity_file = None
  options = None
  if not args.plain:
    identity_file = ssh_helper.keys.key_file
    options = ssh_helper.GetConfig(ssh_utils.HostKeyAlias(instance),
                                   args.strict_host_key_checking)

  extra_flags = ssh.ParseAndSubstituteSSHFlags(args, remote, ip_address)
  remainder = []

  if args.ssh_args:
    remainder.extend(args.ssh_args)

  # Transform args.command into arg list or None if no command
  command_list = args.command.split(' ') if args.command else None
  tty = containers.GetTty(args.container, command_list)
  remote_command = containers.GetRemoteCommand(args.container, command_list)

  target_remote = remote
  port = ssh_utils.DEFAULT_SSH_PORT
  ip_type = (ip.IpTypeEnum.INTERNAL if args.internal_ip
             else ip.IpTypeEnum.EXTERNAL)
  tunnel_helper = None
  interface = None
  if hasattr(args, 'tunnel_through_iap') and args.tunnel_through_iap:
    # Route the connection through an IAP tunnel: ssh connects to a local
    # listener instead of the instance address.
    tunnel_helper, interface = ssh_utils.CreateIapTunnelHelper(
        args, instance_ref, instance, ip_type)
    tunnel_helper.StartListener()
    target_remote = ssh.Remote('localhost', user)
    port = tunnel_helper.GetLocalPort()

  cmd = ssh.SSHCommand(target_remote, port=str(port),
                       identity_file=identity_file, options=options,
                       extra_flags=extra_flags, remote_command=remote_command,
                       tty=tty, remainder=remainder)

  if args.dry_run:
    log.out.Print(' '.join(cmd.Build(ssh_helper.env)))
    # Tear down the tunnel listener we opened above before returning.
    if tunnel_helper:
      tunnel_helper.StopListener()
    return

  if args.plain or use_oslogin:
    keys_newly_added = False
  else:
    keys_newly_added = ssh_helper.EnsureSSHKeyExists(
        client, remote.user, instance, project)

  if keys_newly_added:
    # The poller gets its own tunnel (accepting multiple connections) so the
    # main tunnel stays dedicated to the actual session.
    poller_tunnel_helper = None
    if tunnel_helper:
      poller_tunnel_helper, _ = ssh_utils.CreateIapTunnelHelper(
          args, instance_ref, instance, ip_type, interface=interface)
      poller_tunnel_helper.StartListener(accept_multiple_connections=True)
    poller = ssh_utils.CreateSSHPoller(remote, identity_file, options,
                                       poller_tunnel_helper,
                                       extra_flags=extra_flags)

    log.status.Print('Waiting for SSH key to propagate.')
    # TODO(b/35355795): Don't force_connect
    try:
      poller.Poll(ssh_helper.env, force_connect=True)
    except retry.WaitException:
      if tunnel_helper:
        tunnel_helper.StopListener()
      raise ssh_utils.NetworkError()
    finally:
      if poller_tunnel_helper:
        poller_tunnel_helper.StopListener()

  if args.internal_ip and not tunnel_helper:
    # The IAP Tunnel connection uses instance name and network interface name,
    # so do not need to additionally verify the instance. Also, the
    # SSHCommand used within the function does not support IAP Tunnels.
    ssh_helper.PreliminarilyVerifyInstance(instance.id, remote, identity_file,
                                           options)

  try:
    # Errors from SSH itself result in an ssh.CommandError being raised
    return_code = cmd.Run(ssh_helper.env, force_connect=True)
  finally:
    # Always close the tunnel, whether ssh succeeded, failed or raised.
    if tunnel_helper:
      tunnel_helper.StopListener()
  if return_code:
    # This is the return code of the remote command. Problems with SSH itself
    # will result in ssh.CommandError being raised above.
    sys.exit(return_code)
def Run(self, args):
  """See ssh_utils.BaseSSHCLICommand.Run."""
  super(SshGA, self).Run(args)
  user, instance_name = ssh_utils.GetUserAndInstance(
      args.user_host, self._use_account_service, self.http)
  instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(
      [instance_name], compute_scope.ScopeEnum.ZONE, args.zone,
      self.resources,
      scope_lister=flags.GetDefaultScopeLister(self.compute_client))[0]
  instance = self.GetInstance(instance_ref)
  if self._use_internal_ip:
    ip_address = ssh_utils.GetInternalIPAddress(instance)
  else:
    ip_address = ssh_utils.GetExternalIPAddress(instance)
  remote = ssh.Remote(ip_address, user)

  identity_file = None
  options = None
  if not args.plain:
    identity_file = self.keys.key_file
    options = self.GetConfig(ssh_utils.HostKeyAlias(instance),
                             args.strict_host_key_checking)

  extra_flags = []
  remainder = []

  if args.ssh_flag:
    # Expand %USER% / %INSTANCE% placeholders inside each --ssh-flag value.
    for flag in args.ssh_flag:
      for flag_part in flag.split():  # We want grouping here
        dereferenced_flag = (flag_part.replace(
            '%USER%', remote.user).replace('%INSTANCE%', ip_address))
        extra_flags.append(dereferenced_flag)

  if args.ssh_args:
    remainder.extend(args.ssh_args)

  tty = ssh_utils.GetTty(args.container, args.command)
  remote_command = ssh_utils.GetRemoteCommand(args.container, args.command)

  cmd = ssh.SSHCommand(remote, identity_file=identity_file,
                       options=options, extra_flags=extra_flags,
                       remote_command=remote_command, tty=tty,
                       remainder=remainder)

  if args.dry_run:
    log.out.Print(' '.join(cmd.Build(self.env)))
    return

  if args.plain:
    keys_newly_added = False
  else:
    keys_newly_added = self.EnsureSSHKeyExists(
        remote.user, instance, instance_ref.project,
        use_account_service=self._use_account_service)

  if keys_newly_added:
    # A freshly added key needs time to propagate to the VM; poll until the
    # connection succeeds or the timeout elapses.
    poller = ssh.SSHPoller(
        remote, identity_file=identity_file, options=options,
        extra_flags=extra_flags,
        max_wait_ms=ssh_utils.SSH_KEY_PROPAGATION_TIMEOUT_SEC)
    log.status.Print('Waiting for SSH key to propagate.')
    # TODO(b/35355795): Don't force_connect
    try:
      poller.Poll(self.env, force_connect=True)
    except retry.WaitException:
      raise ssh_utils.NetworkError()

  if self._use_internal_ip:
    self._PreliminarylyVerifyInstance(instance.id, remote, identity_file,
                                      options, extra_flags)

  return_code = cmd.Run(self.env, force_connect=True)
  if return_code:
    # Can't raise an exception because we don't want any "ERROR" message
    # printed; the output from `ssh` will be enough.
    sys.exit(return_code)
def Run(self, args):
  """SSH into one or more workers of a Cloud TPU VM node.

  Validates the node and arguments, handles OS Login and key distribution,
  then runs the SSH command per worker (concurrently when multiple workers
  are targeted with --command).
  """
  user, tpu_name = ssh_utils.GetUserAndInstance(args.user_tpu)

  # If zone is not set, retrieve the one from the config.
  if args.zone is None:
    args.zone = properties.VALUES.compute.zone.Get(required=True)
  # Validate the output path.
  if args.output_directory:
    if not args.command:
      raise exceptions.InvalidArgumentException(
          '--output_directory', 'cannot be specified without the `--command` '
          'flag. Please specify the `--command` flag or remove the '
          '--output-directory flag.')
    output_directory_path = os.path.abspath(
        os.path.expandvars(os.path.expanduser(args.output_directory)))
    if not os.path.isdir(output_directory_path):
      raise exceptions.InvalidArgumentException(
          '--output_directory', 'Failed to find directory {}. Please create '
          'it or specify another directory'.format(output_directory_path))

  # Retrieve the node.
  tpu = tpu_utils.TPUNode(self.ReleaseTrack())
  node = tpu.Get(tpu_name, args.zone)
  if not tpu_utils.IsTPUVMNode(node):
    raise exceptions.BadArgumentException(
        'TPU',
        'this command is only available for Cloud TPU VM nodes. To access '
        'this node, please see '
        'https://cloud.google.com/tpu/docs/creating-deleting-tpus.')

  tpu_ssh_utils.ValidateTPUState(node.state,
                                 tpu.messages.Node.StateValueValuesEnum)

  worker_ips = tpu_ssh_utils.ParseWorkerFlag(args.worker,
                                             node.networkEndpoints,
                                             args.internal_ip)

  if len(worker_ips) > 1 and not args.command:
    # Interactive sessions only make sense on a single worker.
    raise exceptions.InvalidArgumentException(
        '--worker', 'cannot target multiple workers without the `--command` '
        'flag.')

  # Retrieve GuestAttributes.
  single_pod_worker = len(node.networkEndpoints) > 1 and len(worker_ips) == 1
  if single_pod_worker:
    # Retrieve only that worker's GuestAttributes.
    worker_id = list(worker_ips)[0]
    guest_attributes_response = tpu.GetGuestAttributes(
        tpu_name, args.zone, six.text_type((worker_id)))
    host_key_suffixes = tpu_ssh_utils.GetHostKeySuffixes(
        guest_attributes_response.guestAttributes,
        len(node.networkEndpoints), worker_id)
  else:
    # Retrieve the GuestAttributes for all workers in that TPU.
    guest_attributes_response = tpu.GetGuestAttributes(tpu_name, args.zone)
    host_key_suffixes = tpu_ssh_utils.GetHostKeySuffixes(
        guest_attributes_response.guestAttributes)

  # Generate the public key.
  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)
  public_key = ssh_helper.keys.GetPublicKey().ToEntry()

  project = tpu_utils.GetProject(self.ReleaseTrack(), ssh_helper)

  if not args.plain:
    # If there is an '@' symbol in the user_host arg, the user is requesting
    # to connect as a specific user. This may get overridden by OS Login.
    username_requested = '@' in args.user_tpu
    _, expiration_micros = ssh_utils.GetSSHKeyExpirationFromArgs(args)
    oslogin_state = ssh.GetOsloginState(
        None, project, user, public_key, expiration_micros,
        self.ReleaseTrack(), username_requested=username_requested,
        instance_enable_oslogin=tpu_ssh_utils.TpuHasOsLoginEnabled(node))
    user = oslogin_state.user

  # Format the key correctly.
  public_key = '{1}:{0} {1}'.format(public_key, user)
  if not args.plain and not args.dry_run:
    tpu_ssh_utils.AddSSHKeyIfNeeded(project, tpu, node, tpu_name, args.zone,
                                    public_key)

  command_list = args.command.split(' ') if args.command else None

  remainder = []
  if args.ssh_args:
    remainder.extend(args.ssh_args)

  if args.output_directory:
    log.status.Print('Preparing SSH command execution; output will be logged '
                     'to {}'.format(output_directory_path))

  instance_names = {}
  if (args.IsKnownAndSpecified('tunnel_through_iap') and
      args.tunnel_through_iap):
    # Retrieve the instance names from the GuestAttributes.
    for worker in worker_ips:
      # The GuestAttributes will only have one entry if we're targeting a
      # single worker.
      index = 0 if single_pod_worker else worker
      instance_name = tpu_ssh_utils.GetFromGuestAttributes(
          guest_attributes_response.guestAttributes, index, 'hostname')
      if instance_name is None:
        log.status.Print('Failed to connect to TPU.')
        log.status.Print(tpu_ssh_utils.IAP_TROUBLESHOOTING_HELP)
        raise tpu_exceptions.IapTunnelingUnavailable()
      instance_names[worker] = instance_name

  ssh_threads = []
  # One slot per worker; filled in by AttemptRunWithRetries.
  exit_statuses = [None] * len(worker_ips)
  for worker, ips in worker_ips.items():
    identity_file = None
    options = None
    if not args.plain:
      identity_file = ssh_helper.keys.key_file
      options = ssh_helper.GetConfig(
          tpu_ssh_utils.GetInstanceID(node.id, worker, host_key_suffixes),
          args.strict_host_key_checking, None)

    remote = ssh.Remote(ips.ip_address, user)

    extra_flags = ssh.ParseAndSubstituteSSHFlags(args, remote, ips.ip_address,
                                                 ips.internal_address)

    iap_tunnel_args = None
    if (args.IsKnownAndSpecified('tunnel_through_iap') and
        args.tunnel_through_iap):
      # Retrieve the instance name from the GuestAttributes.
      instance_name = instance_names[worker]
      iap_tunnel_args = tpu_ssh_utils.CreateSshTunnelArgs(
          args, self.ReleaseTrack(), project, args.zone, instance_name)

    cmd = ssh.SSHCommand(remote=remote, identity_file=identity_file,
                         remote_command=command_list,
                         extra_flags=extra_flags, options=options,
                         remainder=remainder, iap_tunnel_args=iap_tunnel_args)

    if args.dry_run:
      log.out.Print(' '.join(cmd.Build(ssh_helper.env)))
      continue

    output_file_writer = None
    if args.output_directory:
      output_file_writer = FileWriter('{}/{}.log'.format(
          output_directory_path, six.text_type(worker)))

    if len(worker_ips) > 1:
      # Run the command on multiple workers concurrently.
      ssh_threads.append(
          threading.Thread(
              target=tpu_ssh_utils.AttemptRunWithRetries,
              args=('SSH', worker, exit_statuses, cmd, ssh_helper.env,
                    output_file_writer, True, SSHRunCmd)))
      ssh_threads[-1].start()
    else:
      # Run on a single worker.
      tpu_ssh_utils.AttemptRunWithRetries('SSH', worker, exit_statuses, cmd,
                                          ssh_helper.env, output_file_writer,
                                          False, SSHRunCmd)

  if len(worker_ips) > 1:
    # Wait for all the threads to complete.
    for i in range(len(ssh_threads)):
      ssh_threads[i].join()

  # Exit with a nonzero code, if there are any.
  # This ensures that if any command failed on a worker, we don't end up
  # returning 0 for a value.
  for status in exit_statuses:
    if status:
      sys.exit(status)
def SSHToInstance(self, args, instance):
  """Helper to manage authentication followed by SSH to the instance.

  Runs the base SSH CLI helper (key setup), resolves the username and
  host-key options, checks for OS Login, waits for metadata SSH keys to
  propagate when OS Login is not in use, then retries the SSH connection
  at a fixed interval until it succeeds or attempts run out.

  Args:
    args: argparse.Namespace, arguments from the calling command. The
      ssh_key_file attribute is overwritten with the default key file.
    instance: The compute instance resource to connect to; must have an
      external NAT IP address.

  Raises:
    ssh.CommandError: If the SSH command still fails on the final attempt.
    SystemExit: If the SSH session's remote command exits nonzero.
  """
  args = self._DefaultArgsForSSH(args)
  external_nat = ssh_utils.GetExternalIPAddress(instance)
  log.status.Print(
      'Trying to SSH to VM with NAT IP:{}'.format(external_nat))
  # Force the default key file so the base helper manages a known key.
  args.ssh_key_file = ssh.Keys.DEFAULT_KEY_FILE
  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)
  identity_file = ssh_helper.keys.key_file

  user, _ = ssh_utils.GetUserAndInstance(args.name)
  host_keys = self._GetHostKeyFromInstance(args.zone, ssh_helper, instance)
  options = self._GetSSHOptions(args.name, ssh_helper, instance, host_keys)

  public_key = ssh_helper.keys.GetPublicKey().ToEntry(include_comment=True)
  # OS Login may substitute a different username; when OS Login is active
  # there is no need to wait for metadata key propagation below.
  user, use_oslogin = ssh.CheckForOsloginAndGetUser(
      instance,
      ssh_helper.GetProject(
          self.client, properties.VALUES.core.project.Get(required=True)),
      user, public_key, None, self.release_track, username_requested=False)

  remote = ssh.Remote(external_nat, user)
  if not use_oslogin:
    self._WaitForSSHKeysToPropagate(ssh_helper, remote, identity_file, user,
                                    instance, options)

  extra_flags = []
  # Ctpu seems to be forwarding some other ports on what
  # seems like the TPU node. Need to understand better before enabling.
  if args.forward_ports:
    # -A enables agent forwarding; the -L flags forward remote ports 6006
    # and 8888 to localhost (presumably TensorBoard and Jupyter — the
    # conventional users of those ports; not confirmed here).
    extra_flags.extend(
        ['-A', '-L', '6006:localhost:6006', '-L', '8888:localhost:8888'])
  ssh_cmd_args = {
      'remote': remote,
      'identity_file': identity_file,
      'options': options,
      'extra_flags': extra_flags
  }

  cmd = ssh.SSHCommand(**ssh_cmd_args)
  max_attempts = 10
  sleep_interval = 30
  # Since the instance was just created, it can take a while for the instance
  # to be ready to accept ssh connections, therefore retry up to 5m (10
  # attempts x 30s). Doesn't need to be backed off, regular interval retry is
  # sufficient since we aren't looking to throttle.
  for i in range(max_attempts):
    try:
      log.status.Print('SSH Attempt #{}...'.format(i))
      # Errors from SSH itself result in an ssh.CommandError being raised
      return_code = cmd.Run(
          ssh_helper.env,
          force_connect=properties.VALUES.ssh.putty_force_connect.GetBool())
      if return_code:
        # This is the return code of the remote command.
        # Problems with SSH itself will result in ssh.CommandError
        # being raised above.
        sys.exit(return_code)
    except ssh.CommandError as e:
      if i == max_attempts - 1:
        # Out of attempts: surface the last SSH error to the caller.
        raise e
      log.status.Print(
          'Retrying: SSH command error: {}'.format(six.text_type(e)))
      time.sleep(sleep_interval)
      continue
    # Success path: the SSH session completed cleanly; stop retrying.
    break
def Run(self, args):
  """See ssh_utils.BaseSSHCLICommand.Run.

  Resolves the target instance from [USER@]INSTANCE, determines the
  effective username (OS Login may override it), picks the internal or
  external IP, builds the SSH command (optionally wrapping a container
  command), ensures/propagates SSH keys unless --plain, and runs the
  command. Exits the process with the remote command's return code when
  it is nonzero.

  Args:
    args: argparse.Namespace, the parsed command-line arguments.

  Raises:
    ssh_utils.NetworkError: If a newly added SSH key fails to propagate
      within the timeout.
    SystemExit: With the SSH command's nonzero return code.
  """
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  client = holder.client
  ssh_helper = ssh_utils.BaseSSHCLIHelper()
  ssh_helper.Run(args)
  user, instance_name = ssh_utils.GetUserAndInstance(args.user_host)
  instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(
      [instance_name], compute_scope.ScopeEnum.ZONE, args.zone,
      holder.resources,
      scope_lister=instance_flags.GetInstanceZoneScopeLister(client))[0]
  instance = ssh_helper.GetInstance(client, instance_ref)
  project = ssh_helper.GetProject(client, instance_ref.project)
  if args.plain:
    # --plain: skip OS Login handling entirely.
    use_oslogin = False
  else:
    user, use_oslogin = ssh_helper.CheckForOsloginAndGetUser(
        instance, project, user, self.ReleaseTrack())
  if self._use_internal_ip:
    ip_address = ssh_utils.GetInternalIPAddress(instance)
  else:
    ip_address = ssh_utils.GetExternalIPAddress(instance)
  remote = ssh.Remote(ip_address, user)
  # In --plain mode no identity file and no gcloud-managed SSH options
  # are supplied; the user's own SSH configuration applies.
  identity_file = None
  options = None
  if not args.plain:
    identity_file = ssh_helper.keys.key_file
    options = ssh_helper.GetConfig(ssh_utils.HostKeyAlias(instance),
                                   args.strict_host_key_checking)
  extra_flags = ssh.ParseAndSubstituteSSHFlags(args, remote, ip_address)
  remainder = []
  if args.ssh_args:
    remainder.extend(args.ssh_args)
  # Transform args.command into arg list or None if no command
  command_list = args.command.split(' ') if args.command else None
  tty = containers.GetTty(args.container, command_list)
  remote_command = containers.GetRemoteCommand(args.container, command_list)
  cmd = ssh.SSHCommand(remote, identity_file=identity_file,
                       options=options, extra_flags=extra_flags,
                       remote_command=remote_command, tty=tty,
                       remainder=remainder)
  if args.dry_run:
    # Print the fully built command line instead of executing it.
    log.out.Print(' '.join(cmd.Build(ssh_helper.env)))
    return
  if args.plain or use_oslogin:
    # OS Login manages keys server-side; --plain leaves keys to the user.
    keys_newly_added = False
  else:
    keys_newly_added = ssh_helper.EnsureSSHKeyExists(
        client, remote.user, instance, project)
  if keys_newly_added:
    # A freshly added metadata key takes time to reach the instance; poll
    # until an SSH connection succeeds or the timeout elapses.
    poller = ssh.SSHPoller(
        remote, identity_file=identity_file, options=options,
        extra_flags=extra_flags,
        max_wait_ms=ssh_utils.SSH_KEY_PROPAGATION_TIMEOUT_SEC)
    log.status.Print('Waiting for SSH key to propagate.')
    # TODO(b/35355795): Don't force_connect
    try:
      poller.Poll(ssh_helper.env, force_connect=True)
    except retry.WaitException:
      raise ssh_utils.NetworkError()
  if self._use_internal_ip:
    # Internal IPs are ambiguous across networks; confirm we reached the
    # intended instance before handing the session to the user.
    ssh_helper.PreliminarilyVerifyInstance(instance.id, remote, identity_file,
                                           options)
  return_code = cmd.Run(ssh_helper.env, force_connect=True)
  if return_code:
    # Can't raise an exception because we don't want any "ERROR" message
    # printed; the output from `ssh` will be enough.
    sys.exit(return_code)
def Run(self, args):
  """See ssh_utils.BaseSSHCommand.Run.

  Connects to a compute instance's serial console through the serial
  port gateway: updates the known-hosts entry for the gateway (falling
  back to an existing or hard-coded host key when the published key
  cannot be downloaded), encodes the target instance and options into
  the SSH username, ensures the user's SSH key exists on the instance,
  and runs the SSH command.

  Args:
    args: argparse.Namespace, the parsed command-line arguments.

  Raises:
    ssh_utils.ArgumentError: If args.user_host is not of the form
      [USER@]INSTANCE.
    SystemExit: With the SSH command's nonzero return code.
  """
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  cua_holder = base_classes.ComputeUserAccountsApiHolder(self.ReleaseTrack())
  client = holder.client
  ssh_helper = ssh_utils.BaseSSHHelper()
  ssh_helper.Run(args)
  ssh_helper.keys.EnsureKeysExist(args.force_key_file_overwrite,
                                  allow_passphrase=True)
  remote = ssh.Remote.FromArg(args.user_host)
  if not remote:
    raise ssh_utils.ArgumentError(
        'Expected argument of the form [USER@]INSTANCE. Received [{0}].'
        .format(args.user_host))
  if not remote.user:
    remote.user = ssh.GetDefaultSshUsername()
  hostname = '[{0}]:{1}'.format(args.serial_port_gateway, CONNECTION_PORT)
  # Update google_compute_known_hosts file with published host key
  if args.serial_port_gateway == SERIAL_PORT_GATEWAY:
    http_client = http.Http()
    http_response = http_client.request(HOST_KEY_URL)
    known_hosts = ssh.KnownHosts.FromDefaultFile()
    if http_response[0]['status'] == '200':
      # Fresh key downloaded successfully: overwrite any cached entry.
      host_key = http_response[1].strip()
      known_hosts.Add(hostname, host_key, overwrite=True)
      known_hosts.Write()
    elif known_hosts.ContainsAlias(hostname):
      # Download failed but a previously cached key exists; try with it.
      log.warn(
          'Unable to download and update Host Key for [{0}] from [{1}]. '
          'Attempting to connect using existing Host Key in [{2}]. If '
          'the connection fails, please try again to update the Host '
          'Key.'.format(SERIAL_PORT_GATEWAY, HOST_KEY_URL,
                        known_hosts.file_path))
    else:
      # No cached key either: fall back to the hard-coded default key.
      known_hosts.Add(hostname, DEFAULT_HOST_KEY)
      known_hosts.Write()
      # Typo fixed: "connetion" -> "connection".
      log.warn(
          'Unable to download Host Key for [{0}] from [{1}]. To ensure '
          'the security of the SSH connection, gcloud will attempt to '
          'connect using a hard-coded Host Key value. If the connection '
          'fails, please try again. If the problem persists, try '
          'updating gcloud and connecting again.'.format(
              SERIAL_PORT_GATEWAY, HOST_KEY_URL))
  instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(
      [remote.host], compute_scope.ScopeEnum.ZONE, args.zone,
      holder.resources,
      scope_lister=instance_flags.GetInstanceZoneScopeLister(client))[0]
  instance = ssh_helper.GetInstance(client, instance_ref)
  project = ssh_helper.GetProject(client, instance_ref.project)
  # Determine the serial user, host tuple (remote). The gateway encodes
  # the target instance and options into the SSH username.
  port = 'port={0}'.format(args.port)
  constructed_username_list = [instance_ref.project, instance_ref.zone,
                               instance_ref.Name(), remote.user, port]
  if args.extra_args:
    for k, v in args.extra_args.items():
      constructed_username_list.append('{0}={1}'.format(k, v))
  # NOTE(review): this separator looks like redacted/scrubbed text
  # ('******'); confirm against the gateway's expected username format.
  serial_user = '******'.join(constructed_username_list)
  serial_remote = ssh.Remote(args.serial_port_gateway, user=serial_user)
  identity_file = ssh_helper.keys.key_file
  options = ssh_helper.GetConfig(hostname, strict_host_key_checking='yes')
  # The gateway is keyed by hostname directly; an alias would not match.
  del options['HostKeyAlias']
  cmd = ssh.SSHCommand(serial_remote, identity_file=identity_file,
                       port=CONNECTION_PORT, options=options)
  if args.dry_run:
    # Print the fully built command line instead of executing it.
    log.out.Print(' '.join(cmd.Build(ssh_helper.env)))
    return
  ssh_helper.EnsureSSHKeyExists(client, cua_holder.client, remote.user,
                                instance, project)
  # Don't wait for the instance to become SSHable. We are not connecting to
  # the instance itself through SSH, so the instance doesn't need to have
  # fully booted to connect to the serial port. Also, ignore exit code 255,
  # since the normal way to terminate the serial port connection is ~. and
  # that causes ssh to exit with 255.
  try:
    return_code = cmd.Run(ssh_helper.env, force_connect=True)
  except ssh.CommandError:
    return_code = 255
  if return_code:
    sys.exit(return_code)
def Run(self, args):
  """Connect to a running flex instance.

  Args:
    args: argparse.Namespace, the args the command was invoked with.

  Raises:
    InvalidInstanceTypeError: The instance is not supported for SSH.
    MissingVersionError: The version specified does not exist.
    MissingInstanceError: The instance specified does not exist.
    UnattendedPromptError: Not running in a tty.
    OperationCancelledError: User cancelled the operation.
    ssh.CommandError: The SSH command exited with SSH exit code, which
      usually implies that a connection problem occurred.

  Returns:
    int, The exit code of the SSH command.
  """
  api_client = appengine_api_client.GetApiClient()
  ssh_env = ssh.Environment.Current()
  ssh_env.RequireSSH()
  ssh_keys = ssh.Keys.FromFilename()
  ssh_keys.EnsureKeysExist(overwrite=False)

  # Resolve the version and reject anything that is not a flex version.
  try:
    version_resource = api_client.GetVersionResource(service=args.service,
                                                     version=args.version)
  except api_exceptions.NotFoundError:
    raise command_exceptions.MissingVersionError(
        '{}/{}'.format(args.service, args.version))
  version = version_util.Version.FromVersionResource(version_resource, None)
  if version.environment is not util.Environment.FLEX:
    if version.environment is util.Environment.MANAGED_VMS:
      raise command_exceptions.InvalidInstanceTypeError(
          'Managed VMs', 'Use `gcloud compute ssh` for Managed VMs instances.')
    raise command_exceptions.InvalidInstanceTypeError('Standard', None)

  instance_res = resources.REGISTRY.Parse(
      args.instance,
      params={
          'appsId': properties.VALUES.core.project.GetOrFail,
          'versionsId': args.version,
          'instancesId': args.instance,
          'servicesId': args.service,
      },
      collection='appengine.apps.services.versions.instances')
  instance_name = instance_res.RelativeName()
  try:
    instance_resource = api_client.GetInstanceResource(instance_res)
  except api_exceptions.NotFoundError:
    raise command_exceptions.MissingInstanceError(instance_name)

  # Debug mode must be enabled before SSH is possible; confirm with the
  # user before proceeding.
  if not instance_resource.vmDebugEnabled:
    log.warn(ENABLE_DEBUG_WARNING)
    console_io.PromptContinue(cancel_on_no=True, throw_if_unattended=True)

  username = ssh.GetDefaultSshUsername()
  target = ssh.Remote(instance_resource.vmIp, user=username)
  key_entry = '{user}:{key} {user}'.format(
      user=username, key=ssh_keys.GetPublicKey().ToEntry())
  log.status.Print(
      'Sending public key to instance [{}].'.format(instance_name))
  api_client.DebugInstance(instance_res, key_entry)

  ssh_options = {
      'IdentitiesOnly': 'yes',  # No ssh-agent as of yet
      'UserKnownHostsFile': ssh.KnownHosts.DEFAULT_PATH,
      'CheckHostIP': 'no',
      'HostKeyAlias': HOST_KEY_ALIAS.format(project=api_client.project,
                                            instance_id=args.instance)
  }
  ssh_cmd = ssh.SSHCommand(target, identity_file=ssh_keys.key_file,
                           options=ssh_options)
  if args.container:
    ssh_cmd.tty = True
    ssh_cmd.remote_command = ['container_exec', args.container, '/bin/sh']
  return ssh_cmd.Run(ssh_env)