def WriteDockerConfig(structure):
  """Write out a complete set of Docker authorization entries.

  This is public only to facilitate testing.

  Args:
    structure: The dict of authorization mappings to write to the
        Docker configuration file.
  """
  # GetDockerConfig reports which on-disk format is in use; the newer
  # format stores auth entries under an 'auths' key of a larger document.
  cfg, new_format = GetDockerConfig()
  if new_format:
    # Preserve the other top-level keys of the existing configuration and
    # replace only the 'auths' mapping.
    full_cfg, _ = _ReadFullDockerConfiguration()
    full_cfg['auths'] = structure
    contents = json.dumps(full_cfg, indent=2)
  else:
    # Legacy .dockercfg: the auth mapping IS the whole file.
    contents = json.dumps(structure, indent=2)
  if platforms.OperatingSystem.Current() == platforms.OperatingSystem.WINDOWS:
    # On windows, there is no good way to atomically write this file.
    with files.OpenForWritingPrivate(cfg) as writer:
      writer.write(contents)
    return
  # This opens files with 0600, which are the correct permissions.
  # The temp file is created in the destination directory so the rename
  # below stays on one filesystem.
  with tempfile.NamedTemporaryFile(dir=os.path.dirname(cfg),
                                   delete=False) as tf:
    tf.write(contents)
  # This pattern atomically writes the file on non-Windows systems.
  os.rename(tf.name, cfg)
def _UpdateFileCache(self):
  """Query the metadata server and persist the answer to the cache file."""
  cache_path = config.Paths().GCECachePath()
  server_says = self._CheckServer()
  with self.file_lock, files.OpenForWritingPrivate(cache_path) as cache_file:
    cache_file.write(str(server_says))
def SaveCredentialsAsADC(creds, file_path):
  """Serialize credentials to a file in Application Default Credentials form.

  Args:
    creds: client.OAuth2Credentials, obtained from a web flow or service
        account.
    file_path: str, file path to store credentials to. The file will be
        created.

  Raises:
    CredentialFileSaveError, on file io errors.
  """
  # Re-wrap the raw OAuth2 fields in a GoogleCredentials object so that the
  # serialized output follows the ADC layout.
  adc_creds = client.GoogleCredentials(creds.access_token,
                                       creds.client_id,
                                       creds.client_secret,
                                       creds.refresh_token,
                                       creds.token_expiry,
                                       creds.token_uri,
                                       creds.user_agent,
                                       creds.revoke_uri)
  try:
    with files.OpenForWritingPrivate(file_path) as out:
      json.dump(adc_creds.serialization_data, out,
                sort_keys=True, indent=2, separators=(',', ': '))
  except IOError as err:
    log.debug(err, exc_info=True)
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: ' + str(err))
def SaveCredentialsAsADC(creds):
  """Saves the credentials as Application Default Credentials.

  The destination path is determined by ADCFilePath(); callers do not
  choose it. (The previous docstring said "to the given file", but this
  function takes no file argument.)

  Args:
    creds: The credentials obtained from a web flow.

  Returns:
    str, The full path to the ADC file that was written.

  Raises:
    CredentialFileSaveError: If the ADC file could not be written.
  """
  # Re-wrap the raw OAuth2 fields so the output follows the ADC layout.
  google_creds = client.GoogleCredentials(
      creds.access_token, creds.client_id, creds.client_secret,
      creds.refresh_token, creds.token_expiry, creds.token_uri,
      creds.user_agent, creds.revoke_uri)
  adc_file = ADCFilePath()
  try:
    with files.OpenForWritingPrivate(adc_file) as f:
      json.dump(google_creds.serialization_data, f, sort_keys=True,
                indent=2, separators=(',', ': '))
  except IOError as e:
    log.debug(e, exc_info=True)
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: ' + str(e))
  return os.path.abspath(adc_file)
def WriteDockerConfig(contents):
  """Write the contents to '.dockercfg'.

  This is public only to facilitate testing.

  Args:
    contents: The body to write to '.dockercfg'.
  """
  cfg_path = GetDockerConfig()
  with files.OpenForWritingPrivate(cfg_path) as writer:
    writer.write(contents)
def WriteTemplate(self): """Write the credential file.""" # General credentials used by bq and gsutil. if self.credentials_type != creds.CredentialType.P12_SERVICE_ACCOUNT: SaveCredentialsAsADC(self.credentials, self._adc_path) if self.credentials_type == creds.CredentialType.USER_ACCOUNT: # We create a small .boto file for gsutil, to be put in BOTO_PATH. # Our client_id and client_secret should accompany our refresh token; # if a user loaded any other .boto files that specified a different # id and secret, those would override our id and secret, causing any # attempts to obtain an access token with our refresh token to fail. self._WriteFileContents( self._gsutil_path, '\n'.join([ '[OAuth2]', 'client_id = {cid}', 'client_secret = {secret}', '', '[Credentials]', 'gs_oauth2_refresh_token = {token}', ]).format(cid=config.CLOUDSDK_CLIENT_ID, secret=config.CLOUDSDK_CLIENT_NOTSOSECRET, token=self.credentials.refresh_token)) elif self.credentials_type == creds.CredentialType.SERVICE_ACCOUNT: self._WriteFileContents( self._gsutil_path, '\n'.join([ '[Credentials]', 'gs_service_key_file = {key_file}', ]).format(key_file=self._adc_path)) else: raise CredentialFileSaveError( 'Unsupported credentials type {0}'.format( type(self.credentials))) else: # P12 service account cred = self.credentials key = cred._private_key_pkcs12 # pylint: disable=protected-access password = cred._private_key_password # pylint: disable=protected-access with files.OpenForWritingPrivate(self._p12_key_path, binary=True) as pk: pk.write(key) # the .boto file gets some different fields self._WriteFileContents( self._gsutil_path, '\n'.join([ '[Credentials]', 'gs_service_client_id = {account}', 'gs_service_key_file = {key_file}', 'gs_service_key_file_password = {key_password}', ]).format(account=self.credentials.service_account_email, key_file=self._p12_key_path, key_password=password))
def WriteTemplate(self): """Write the credential file.""" # General credentials used by bq and gsutil. if not isinstance(self.credentials, client.SignedJwtAssertionCredentials): SaveCredentialsAsADC(self.credentials, self._adc_path) if self.credentials.refresh_token: # we create a small .boto file for gsutil, to be put in BOTO_PATH self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_oauth2_refresh_token = {token} """).format(token=self.credentials.refresh_token)) elif (client.HAS_CRYPTO and isinstance( self.credentials, client.SignedJwtAssertionCredentials)): with files.OpenForWritingPrivate(self._p12_key_path, binary=True) as pk: pk.write(base64.b64decode(self.credentials.private_key)) # the .boto file gets some different fields self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_service_client_id = {account} gs_service_key_file = {key_file} gs_service_key_file_password = {key_password} """).format(account=self.credentials.service_account_name, key_file=self._p12_key_path, key_password=self.credentials.private_key_password)) # Remove linter directive when # https://github.com/google/oauth2client/issues/165 is addressed. elif isinstance( self.credentials, # pylint: disable=protected-access service_account._ServiceAccountCredentials # pylint: enable=protected-access ): self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_service_key_file = {key_file} """).format(key_file=self._adc_path)) else: raise CredentialFileSaveError( 'Unsupported credentials type {0}'.format( type(self.credentials)))
def _WriteDisk(self, on_gce):
  """Best-effort write of the on-GCE determination to the cache file."""
  cache_path = config.Paths().GCECachePath()
  with self.file_lock:
    try:
      with files.OpenForWritingPrivate(cache_path) as cache_file:
        cache_file.write(str(on_gce))
    except (OSError, IOError, files.Error):
      # Failed to write Google Compute Engine credential cache file.
      # This could be due to permission reasons, or because it doesn't yet
      # exist. Deliberately swallowed: we can't log here because the log
      # module depends (indirectly) on this one.
      pass
def _WriteFileContents(self, filepath, contents):
  """Writes contents to a path, ensuring mkdirs.

  Args:
    filepath: str, The path of the file to write.
    contents: str, The contents to write to the file.
  """
  expanded = os.path.expanduser(filepath)
  resolved = os.path.realpath(expanded)
  try:
    with files.OpenForWritingPrivate(resolved) as out_file:
      out_file.write(contents)
  except (OSError, IOError) as e:
    raise Exception('Failed to open %s for writing: %s' % (filepath, e))
def WriteTemplate(self): """Write the credential file.""" # General credentials used by bq and gsutil. if self.credentials_type != CredentialType.P12_SERVICE_ACCOUNT: SaveCredentialsAsADC(self.credentials, self._adc_path) if self.credentials_type == CredentialType.USER_ACCOUNT: # we create a small .boto file for gsutil, to be put in BOTO_PATH self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_oauth2_refresh_token = {token} """).format(token=self.credentials.refresh_token)) elif self.credentials_type == CredentialType.SERVICE_ACCOUNT: self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_service_key_file = {key_file} """).format(key_file=self._adc_path)) else: raise CredentialFileSaveError( 'Unsupported credentials type {0}'.format( type(self.credentials))) else: # P12 service account cred = self.credentials key = cred._private_key_pkcs12 # pylint: disable=protected-access password = cred._private_key_password # pylint: disable=protected-access with files.OpenForWritingPrivate(self._p12_key_path, binary=True) as pk: pk.write(key) # the .boto file gets some different fields self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_service_client_id = {account} gs_service_key_file = {key_file} gs_service_key_file_password = {key_password} """).format(account=self.credentials.service_account_email, key_file=self._p12_key_path, key_password=password))
def UpdateKnownHostsFile(known_hosts_file, hostname, host_key,
                         overwrite_keys=False):
  """Update the known_hosts file entry for the given hostname.

  If there is no entry for the give hostname, it will be added. If there is
  an entry already and overwrite_keys is False, nothing will be changed. If
  there is an entry and overwrite_keys is True, the key will be updated if it
  has changed.

  Args:
    known_hosts_file: str, The full path of the known_hosts file to update.
    hostname: str, The hostname for the known_hosts entry.
    host_key: str, The host key for the given hostname.
    overwrite_keys: bool, If true, will overwrite the entry corresponding to
      hostname with the new host_key if it already exists. If false and an
      entry already exists for hostname, will ignore the new host_key value.
  """
  known_hosts_contents = ReadFile(known_hosts_file)
  key_list = known_hosts_contents.splitlines()
  found_key_entry = None
  # Entries have the form '<hostname> <key>'.
  new_key_entry = '{0} {1}'.format(hostname, host_key)
  for key in key_list:
    # Match the hostname token exactly. A bare startswith(hostname) would
    # also match longer hostnames sharing the prefix (e.g. looking for
    # 'host1' would wrongly match an entry for 'host10').
    if key.startswith(hostname + ' '):
      found_key_entry = key
      break
  if overwrite_keys and found_key_entry:
    if found_key_entry != new_key_entry:
      # The stored key differs; drop it so the new entry is appended below.
      key_list.remove(found_key_entry)
      found_key_entry = None
  if not found_key_entry:
    key_list.append(new_key_entry)
  new_contents = '\n'.join(key_list) + '\n'
  with files.OpenForWritingPrivate(known_hosts_file) as f:
    f.write(new_contents)
def WriteDockerConfig(contents):
  """Write the contents to '.dockercfg'.

  This is public only to facilitate testing.

  Args:
    contents: The body to write to '.dockercfg'.
  """
  # Resolve the config path once up front instead of separately in each
  # branch (the Windows branch previously called GetDockerConfig() inline).
  cfg = GetDockerConfig()
  if platforms.OperatingSystem.Current() == platforms.OperatingSystem.WINDOWS:
    # On windows, there is no good way to atomically write this file.
    with files.OpenForWritingPrivate(cfg) as writer:
      writer.write(contents)
    return
  # This opens files with 0600, which are the correct permissions.
  # The temp file lives in the destination directory so the rename below
  # stays on one filesystem.
  with tempfile.NamedTemporaryFile(dir=os.path.dirname(cfg),
                                   delete=False) as tf:
    tf.write(contents)
  # This pattern atomically writes the file on non-Windows systems.
  os.rename(tf.name, cfg)
def ActivateGitP2D(account, creds, netrc_path=None):
  """Modify the user's netrc file so that they can use git push-to-deploy.

  Args:
    account: str, The account that is being activated.
    creds: oauth2client.client.Credentials, The credentials that will be
        inspected for a refresh token.
    netrc_path: str, Path to an alternative netrc file.
  """
  if not creds.refresh_token:
    raise c_store.Error('Active credentials have no refresh token.')
  machine_entry = _NEW_MACHINE_FORMAT.format(login=account,
                                             password=creds.refresh_token)
  if not netrc_path:
    on_windows = (platforms.OperatingSystem.Current() ==
                  platforms.OperatingSystem.WINDOWS)
    # Yes, the right place on windows is "%HOME%\_netrc".
    netrc_name = '_netrc' if on_windows else '.netrc'
    try:
      home_dir = os.environ['HOME']
    except KeyError:
      raise c_exc.BadFileException(
          'Cannot find %s file ($HOME is not set).' % netrc_name)
    netrc_path = os.path.join(home_dir, netrc_name)
  if os.path.exists(netrc_path):
    with open(netrc_path) as existing_file:
      current_data = existing_file.read()
  else:
    current_data = ''
  updated_data = _ScrapeAndReplaceNetRC(current_data, machine_entry)
  with files.OpenForWritingPrivate(netrc_path) as out_file:
    out_file.write(updated_data)
def SaveCredentialsAsADC(credentials, file_path):
  """Saves the credentials to the given file.

  This file can be read back via
    cred = client.GoogleCredentials.from_stream(file_path)

  Args:
    credentials: client.OAuth2Credentials, obtained from a web flow
        or service account.
    file_path: str, file path to store credentials to. The file will be
        created.

  Raises:
    CredentialFileSaveError: on file io errors.
  """
  creds_type = creds.CredentialType.FromCredentials(credentials)
  if creds_type == creds.CredentialType.P12_SERVICE_ACCOUNT:
    # Fixed: the adjacent string literals previously concatenated without a
    # space, producing "...p12 keys are notsupported in this format".
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: p12 keys are not '
        'supported in this format')
  if creds_type == creds.CredentialType.USER_ACCOUNT:
    # Re-wrap user credentials so they serialize in the ADC layout.
    credentials = client.GoogleCredentials(
        credentials.access_token,
        credentials.client_id,
        credentials.client_secret,
        credentials.refresh_token,
        credentials.token_expiry,
        credentials.token_uri,
        credentials.user_agent,
        credentials.revoke_uri)
  try:
    with files.OpenForWritingPrivate(file_path) as f:
      json.dump(credentials.serialization_data, f, sort_keys=True,
                indent=2, separators=(',', ': '))
  except IOError as e:
    log.debug(e, exc_info=True)
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: ' + str(e))
def WriteFile(self, file_name, contents, make_private=False):
  """Writes a file, automatically handling all relevant errors.

  Args:
    file_name: The file to write
    contents: The data to write into the file
    make_private: If True, set the permission of the file to user
      read/write only. Otherwise set it as public. Default to False.

  Raises:
    ToolException: An error occurred when trying to write the file.
  """
  try:
    if not make_private:
      # World-readable: a plain binary write suffices.
      with open(file_name, 'wb') as handle:
        handle.write(contents)
    else:
      # Private: create the file with 0600 permissions.
      with files.OpenForWritingPrivate(file_name, binary=True) as handle:
        handle.write(contents)
  except EnvironmentError:
    raise exceptions.ToolException(
        'The given file could not be written: {0}'.format(file_name))
def SaveCredentialsAsADC(creds, file_path):
  """Saves the credentials to the given file.

  This file can be read back via
    cred = client.GoogleCredentials.from_stream(file_path)

  Args:
    creds: client.OAuth2Credentials, obtained from a web flow or service
        account.
    file_path: str, file path to store credentials to. The file will be
        created.

  Raises:
    CredentialFileSaveError: on file io errors.
  """
  if isinstance(creds, client.SignedJwtAssertionCredentials):
    # Fixed: the adjacent string literals previously concatenated without a
    # space, producing "...p12 keys are notsupported in this format".
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: p12 keys are not '
        'supported in this format')
  # pylint: disable=protected-access
  if not isinstance(creds, service_account._ServiceAccountCredentials):
    # Re-wrap user credentials so they serialize in the ADC layout.
    creds = client.GoogleCredentials(creds.access_token,
                                     creds.client_id,
                                     creds.client_secret,
                                     creds.refresh_token,
                                     creds.token_expiry,
                                     creds.token_uri,
                                     creds.user_agent,
                                     creds.revoke_uri)
  try:
    with files.OpenForWritingPrivate(file_path) as f:
      json.dump(creds.serialization_data, f, sort_keys=True,
                indent=2, separators=(',', ': '))
  except IOError as e:
    log.debug(e, exc_info=True)
    raise CredentialFileSaveError(
        'Error saving Application Default Credentials: ' + str(e))
def Run(self, args):
  """Populate the user's SSH config with aliases for Compute instances.

  Builds (or removes, with --remove) the Google Compute Engine section of
  the SSH config file, merging with any existing content.

  Args:
    args: argparse.Namespace, the parsed command-line arguments.

  Raises:
    exceptions.ToolException: If more than one Compute Engine section is
        found in the existing SSH config file.
  """
  super(ConfigSSH, self).Run(args)
  ssh_config_file = os.path.expanduser(
      args.ssh_config_file or ssh.PER_USER_SSH_CONFIG_FILE)
  instances = None
  if args.remove:
    compute_section = ''
  else:
    self.EnsureSSHKeyIsInProject(getpass.getuser())
    instances = list(self.GetInstances())
    if instances:
      compute_section = _BuildComputeSection(
          instances, self.ssh_key_file, ssh.KnownHosts.DEFAULT_PATH)
    else:
      compute_section = ''
  try:
    existing_content = files.GetFileContents(ssh_config_file)
  except files.Error as e:
    existing_content = ''
    log.debug('SSH Config File [{0}] could not be opened: {1}'.format(
        ssh_config_file, e))
  if existing_content:
    section_re = re.compile(_COMPUTE_SECTION_RE,
                            flags=re.MULTILINE | re.DOTALL)
    match = section_re.search(existing_content)
    if not match:
      # There are no existing Compute Engine sections. If there is
      # at least one instance in the project (signified by
      # compute_section not being None), we append it to the end of
      # the configs. Otherwise, we set content to None which will
      # cause nothing to be written to the SSH config file.
      if compute_section:
        # Ensures that there is a blank line between the existing
        # configs and the Compute section.
        if existing_content[-1] != '\n':
          existing_content += '\n'
        if existing_content[-2:] != '\n\n':
          existing_content += '\n'
        new_content = existing_content + compute_section
      else:
        new_content = existing_content
    elif section_re.search(existing_content[match.end(1):]):
      # Multiple Compute Engine sections.
      raise exceptions.ToolException(
          'Found more than one Google Compute Engine section in [{0}]. '
          'You can either delete [{0}] and let this command recreate it for '
          'you or you can manually delete all sections marked with '
          '[{1}] and [{2}].'.format(ssh_config_file, _BEGIN_MARKER,
                                    _END_MARKER))
    else:
      # One Compute Engine section -- replace it.
      new_content = '{before}{new}{after}'.format(
          before=existing_content[0:match.start(1)],
          new=compute_section,
          after=existing_content[match.end(1):])
  else:
    new_content = compute_section
  if args.dry_run:
    log.out.write(new_content or '')
    return
  if new_content != existing_content:
    if (os.path.exists(ssh_config_file) and
        platforms.OperatingSystem.Current() is not
        platforms.OperatingSystem.WINDOWS):
      ssh_config_perms = os.stat(ssh_config_file).st_mode
      # From `man 5 ssh_config`:
      # this file must have strict permissions: read/write for the user,
      # and not accessible by others.
      # We check that here:
      if not (ssh_config_perms & stat.S_IRWXU == stat.S_IWUSR | stat.S_IRUSR
              and ssh_config_perms & stat.S_IWGRP == 0
              and ssh_config_perms & stat.S_IWOTH == 0):
        # Fixed: this message previously lacked the .format() call, so the
        # literal '[{0}]' placeholder was logged instead of the file name.
        log.warn(
            'Invalid permissions on [{0}]. Please change to match ssh '
            'requirements (see man 5 ssh).'.format(ssh_config_file))
    # TODO(user): This write will not work very well if there is
    # a lot of write contention for the SSH config file. We should
    # add a function to do a better job at "atomic file writes".
    with files.OpenForWritingPrivate(ssh_config_file) as f:
      f.write(new_content)
  if compute_section:
    log.out.write(
        textwrap.dedent("""\
        You should now be able to use ssh/scp with your instances.
        For example, try running:

          $ ssh {alias}
        """.format(alias=_CreateAlias(instances[0]))))
  elif not instances and not args.remove:
    log.warn(
        'No host aliases were added to your SSH configs because you do not '
        'have any instances. Try running this command again after creating '
        'some instances.')
def Write(self):
  """Writes the file to disk."""
  serialized = '\n'.join(self.known_hosts) + '\n'
  with files.OpenForWritingPrivate(self.file_path) as out_file:
    out_file.write(serialized)
def Run(self, args):
  """Creates an SSL certificate for a Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked
        with.

  Returns:
    A dict object representing the operations resource describing the create
    operation if the create was successful.

  Raises:
    HttpException: A http error response was received while executing api
        request.
    ToolException: An error other than http error occured while executing the
        command.
  """
  if os.path.exists(args.cert_file):
    raise exceptions.ToolException('file [{path}] already exists'.format(
        path=args.cert_file))
  # First check if args.out_file is writeable. If not, abort and don't create
  # the useless cert.
  # NOTE(review): if the API call below fails, the 'placeholder' file is
  # left behind — confirm whether cleanup is expected here.
  try:
    with files.OpenForWritingPrivate(args.cert_file) as cf:
      cf.write('placeholder\n')
  except (files.Error, OSError) as e:
    raise exceptions.ToolException('unable to write [{path}]: {error}'.format(
        path=args.cert_file, error=str(e)))
  sql_client = self.context['sql_client']
  sql_messages = self.context['sql_messages']
  resources = self.context['registry']
  validate.ValidateInstanceName(args.instance)
  instance_ref = resources.Parse(args.instance, collection='sql.instances')
  # TODO(jasmuth): figure out how to rectify the common_name and the
  # sha1fingerprint, so that things can work with the resource parser.
  result = sql_client.sslCerts.Insert(
      sql_messages.SqlSslCertsInsertRequest(
          project=instance_ref.project,
          instance=instance_ref.instance,
          sslCertsInsertRequest=sql_messages.SslCertsInsertRequest(
              commonName=args.common_name)))
  # The private key is only returned once by the API; persist it now,
  # overwriting the placeholder written above.
  private_key = result.clientCert.certPrivateKey
  with files.OpenForWritingPrivate(args.cert_file) as cf:
    cf.write(private_key)
    cf.write('\n')
  cert_ref = resources.Create(
      collection='sql.sslCerts',
      project=instance_ref.project,
      instance=instance_ref.instance,
      sha1Fingerprint=result.clientCert.certInfo.sha1Fingerprint)
  log.CreatedResource(cert_ref)
  return result
def Run(self, args):
  """Creates an SSL certificate for a Cloud SQL instance.

  Args:
    args: argparse.Namespace, The arguments that this command was invoked
        with.

  Returns:
    A dict object representing the operations resource describing the create
    operation if the create was successful.

  Raises:
    ArgumentError: If the file path provided cannot be written to.
  """
  if os.path.exists(args.cert_file):
    raise exceptions.ArgumentError(
        'file [{path}] already exists'.format(path=args.cert_file))
  # First check if args.out_file is writeable. If not, abort and don't create
  # the useless cert.
  # NOTE(review): if the API call below fails, the 'placeholder' file is
  # left behind — confirm whether cleanup is expected here.
  try:
    with files.OpenForWritingPrivate(args.cert_file) as cf:
      cf.write('placeholder\n')
  except (files.Error, OSError) as e:
    raise exceptions.ArgumentError(
        'unable to write [{path}]: {error}'.format(path=args.cert_file,
                                                   error=str(e)))
  client = api_util.SqlClient(api_util.API_VERSION_DEFAULT)
  sql_client = client.sql_client
  sql_messages = client.sql_messages
  validate.ValidateInstanceName(args.instance)
  instance_ref = client.resource_parser.Parse(
      args.instance,
      params={'project': properties.VALUES.core.project.GetOrFail},
      collection='sql.instances')
  # TODO(b/36049399): figure out how to rectify the common_name and the
  # sha1fingerprint, so that things can work with the resource parser.
  result = sql_client.sslCerts.Insert(
      sql_messages.SqlSslCertsInsertRequest(
          project=instance_ref.project,
          instance=instance_ref.instance,
          sslCertsInsertRequest=sql_messages.SslCertsInsertRequest(
              commonName=args.common_name)))
  # The private key is only returned once by the API; persist it now,
  # overwriting the placeholder written above.
  private_key = result.clientCert.certPrivateKey
  with files.OpenForWritingPrivate(args.cert_file) as cf:
    cf.write(private_key)
    cf.write('\n')
  cert_ref = client.resource_parser.Create(
      collection='sql.sslCerts',
      project=instance_ref.project,
      instance=instance_ref.instance,
      sha1Fingerprint=result.clientCert.certInfo.sha1Fingerprint)
  log.CreatedResource(cert_ref)
  return result.clientCert.certInfo
def WriteTemplate(self): """Write the credential file.""" # straight up credentials in JSON self._WriteFileContents(self._json_path, self.credentials.to_json()) # multistore version self._WriteFileContents(self._multistore_path, '') storage = oauth2_multistore_file.get_credential_storage( self._multistore_path, self.credentials.client_id, self.credentials.user_agent, self.scopes) storage.put(self.credentials) if self.credentials.refresh_token: # gae java wants something special self._WriteFileContents( self._gae_java_path, textwrap.dedent("""\ oauth2_client_secret: {secret} oauth2_client_id: {id} oauth2_refresh_token: {token} """).format(secret=config.CLOUDSDK_CLIENT_NOTSOSECRET, id=config.CLOUDSDK_CLIENT_ID, token=self.credentials.refresh_token)) # we create a small .boto file for gsutil, to be put in BOTO_PATH self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_oauth2_refresh_token = {token} """).format(token=self.credentials.refresh_token)) if (oauth2_client.HAS_CRYPTO and type(self.credentials) == oauth2_client.SignedJwtAssertionCredentials): with files.OpenForWritingPrivate(self._key_path) as pk: pk.write(base64.b64decode(self.credentials.private_key)) # the .boto file gets some different fields self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_service_client_id = {account} gs_service_key_file = {key_file} gs_service_key_file_password = {key_password} """).format(account=self.credentials.service_account_name, key_file=self._key_path, key_password=self.credentials.private_key_password)) # pylint: disable=protected-access # Remove linter directive when # https://github.com/google/oauth2client/issues/165 is addressed. if isinstance(self.credentials, oauth2_service_account._ServiceAccountCredentials): # TODO(cherba): Currently activate-service-account discards the JSON # key file after reading it; save it so that we can hand it to gsutil. 
# For now, serialize the credentials back to their original # JSON key file form. json_key_dict = { 'client_id': self.credentials._service_account_id, 'client_email': self.credentials._service_account_email, 'private_key': self.credentials._private_key_pkcs8_text, 'private_key_id': self.credentials._private_key_id, 'type': 'service_account' } with files.OpenForWritingPrivate(self._json_key_path) as pk: pk.write(json.dumps(json_key_dict)) self._WriteFileContents( self._gsutil_path, textwrap.dedent("""\ [Credentials] gs_service_key_file = {key_file} """).format(key_file=self._json_key_path))
def _CacheIsOnGCE(on_gce):
  """Persist the on-GCE determination to the GCE cache file."""
  cache_path = config.Paths().GCECachePath()
  with files.OpenForWritingPrivate(cache_path) as cache_file:
    cache_file.write(str(on_gce))
def Run(self, args):
  """See ssh_utils.BaseSSHCommand.Run."""
  super(ConfigSSH, self).Run(args)
  self.keys.EnsureKeysExist(args.force_key_file_overwrite,
                            allow_passphrase=True)
  ssh_config_file = os.path.expanduser(
      args.ssh_config_file or ssh.PER_USER_SSH_CONFIG_FILE)
  instances = None
  try:
    existing_content = files.GetFileContents(ssh_config_file)
  except files.Error as e:
    existing_content = ''
    log.debug('SSH Config File [{0}] could not be opened: {1}'.format(
        ssh_config_file, e))
  if args.remove:
    compute_section = ''
    try:
      new_content = _RemoveComputeSection(existing_content)
    except MultipleComputeSectionsError:
      # Re-raise with the file name so the user knows which file to fix.
      raise MultipleComputeSectionsError(ssh_config_file)
  else:
    self.EnsureSSHKeyIsInProject(
        ssh.GetDefaultSshUsername(warn_on_account_user=True))
    instances = list(self.GetInstances())
    if instances:
      compute_section = _BuildComputeSection(
          instances, self.keys.key_file, ssh.KnownHosts.DEFAULT_PATH)
    else:
      compute_section = ''
  if existing_content and not args.remove:
    try:
      new_content = _MergeComputeSections(existing_content, compute_section)
    except MultipleComputeSectionsError:
      raise MultipleComputeSectionsError(ssh_config_file)
  elif not existing_content:
    new_content = compute_section
  if args.dry_run:
    log.out.write(new_content or '')
    return
  if new_content != existing_content:
    if (os.path.exists(ssh_config_file) and
        platforms.OperatingSystem.Current() is not
        platforms.OperatingSystem.WINDOWS):
      ssh_config_perms = os.stat(ssh_config_file).st_mode
      # From `man 5 ssh_config`:
      # this file must have strict permissions: read/write for the user,
      # and not accessible by others.
      # We check that here:
      if not (ssh_config_perms & stat.S_IRWXU == stat.S_IWUSR | stat.S_IRUSR
              and ssh_config_perms & stat.S_IWGRP == 0
              and ssh_config_perms & stat.S_IWOTH == 0):
        # Fixed: this message previously lacked the .format() call, so the
        # literal '[{0}]' placeholder was logged instead of the file name.
        log.warn(
            'Invalid permissions on [{0}]. Please change to match ssh '
            'requirements (see man 5 ssh).'.format(ssh_config_file))
    # TODO(b/36050483): This write will not work very well if there is
    # a lot of write contention for the SSH config file. We should
    # add a function to do a better job at "atomic file writes".
    # (Fixed: the comment above was previously garbled, leaving the bare
    # tokens "We should" outside any comment.)
    with files.OpenForWritingPrivate(ssh_config_file) as f:
      f.write(new_content)
  if compute_section:
    log.out.write(
        textwrap.dedent("""\
        You should now be able to use ssh/scp with your instances.
        For example, try running:

          $ ssh {alias}
        """.format(alias=_CreateAlias(instances[0]))))
  elif not instances and not args.remove:
    log.warn(
        'No host aliases were added to your SSH configs because you do not '
        'have any instances. Try running this command again after creating '
        'some instances.')