Example 1
  def CreateSslCertificate(self, display_name, cert_path, private_key_path):
    """Creates a certificate for the given application.

    Args:
      display_name: str, the display name for the new certificate.
      cert_path: str, location on disk to a certificate file.
      private_key_path: str, location on disk to a private key file.

    Returns:
      The created AuthorizedCertificate object.

    Raises:
      files.Error: if either file does not exist or can't be opened or read.
    """
    certificate_data = files.GetFileContents(cert_path)
    private_key_data = files.GetFileContents(private_key_path)

    cert = self.messages.CertificateRawData(
        privateKey=private_key_data, publicCertificate=certificate_data)

    auth_cert = self.messages.AuthorizedCertificate(
        displayName=display_name, certificateRawData=cert)

    request = self.messages.AppengineAppsAuthorizedCertificatesCreateRequest(
        parent=self._FormatApp(), authorizedCertificate=auth_cert)

    return self.client.apps_authorizedCertificates.Create(request)
Example 2
    def UpdateSslCertificate(self,
                             cert_id,
                             display_name=None,
                             cert_path=None,
                             private_key_path=None):
        """Updates a certificate for the given application.

    At least one of display_name, cert_path, or private_key_path must be set,
    and cert_path and private_key_path must be provided together. Omitted
    fields are left at their current values. Any invalid argument fails the
    entire command.

    Args:
      cert_id: str, the id of the certificate to update.
      display_name: str, the display name for a new certificate.
      cert_path: str, location on disk to a certificate file.
      private_key_path: str, location on disk to a private key file.

    Returns:
      The updated AuthorizedCertificate object.

    Raises:
      RequiredArgumentException: if only one of cert_path and private_key_path
        is specified.
      MinimumArgumentException: if no field to update is specified.
    """
        if bool(cert_path) ^ bool(private_key_path):
            missing_arg = '--certificate' if not cert_path else '--private-key'
            raise exceptions.RequiredArgumentException(
                missing_arg,
                'The certificate and the private key must both be updated together.'
            )

        mask_fields = []

        if display_name:
            mask_fields.append('displayName')

        cert_data = None
        if cert_path and private_key_path:
            certificate = files.GetFileContents(cert_path)
            private_key = files.GetFileContents(private_key_path)
            cert_data = self.messages.CertificateRawData(
                privateKey=private_key, publicCertificate=certificate)
            mask_fields.append('certificateRawData')

        auth_cert = self.messages.AuthorizedCertificate(
            displayName=display_name, certificateRawData=cert_data)

        if not mask_fields:
            raise exceptions.MinimumArgumentException(
                ['--certificate', '--private-key', '--display-name'],
                'Please specify at least one attribute of the certificate to '
                'update.')

        request = self.messages.AppengineAppsAuthorizedCertificatesPatchRequest(
            name=self._FormatSslCert(cert_id),
            authorizedCertificate=auth_cert,
            updateMask=','.join(mask_fields))

        return self.client.apps_authorizedCertificates.Patch(request)
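The `bool(cert_path) ^ bool(private_key_path)` test above is a compact both-or-neither guard: XOR on truthiness is true only when exactly one of the two paths was supplied. A standalone sketch of the same check (names here are illustrative, not SDK code):

def _check_cert_pair(cert_path, private_key_path):
    # XOR of truthiness: true only when exactly one argument is set.
    if bool(cert_path) ^ bool(private_key_path):
        missing = '--certificate' if not cert_path else '--private-key'
        raise ValueError('{0} must be supplied as well'.format(missing))

_check_cert_pair('cert.pem', 'key.pem')  # ok: both set
_check_cert_pair(None, None)             # ok: neither set
# _check_cert_pair('cert.pem', None)     # raises: --private-key is missing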
Example 3
def GetFileAsMessage(path, message):
    """Reads a YAML or JSON object of type message from local path.

  Args:
    path: A local path to an object specification in YAML or JSON format.
    message: The message type to be parsed from the file.

  Returns:
    Object of type message, if successful.
  Raises:
    files.Error, exceptions.ResourceManagerInputFileError
  """
    in_text = files.GetFileContents(path)
    if not in_text:
        raise exceptions.ResourceManagerInputFileError(
            'Empty policy file [{0}]'.format(path))

    # Parse it, first trying YAML then JSON.
    try:
        result = encoding.PyValueToMessage(message, yaml.load(in_text))
    except (ValueError, AttributeError, yaml.YAMLParseError):
        try:
            result = encoding.JsonToMessage(message, in_text)
        except (ValueError, DecodeError) as e:
            # ValueError is raised when JSON is badly formatted
            # DecodeError is raised when a tag is badly formatted (not Base64)
            raise exceptions.ResourceManagerInputFileError(
                'Policy file [{0}] is not properly formatted YAML or JSON '
                'due to [{1}]'.format(path, str(e)))
    return result
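The parse order here matters: YAML is attempted first and JSON only as a fallback, since most JSON documents also parse as YAML but not vice versa. A minimal sketch of the same fallback using PyYAML directly (the SDK's yaml module wraps PyYAML and renames its errors):

import json

import yaml  # PyYAML; googlecloudsdk.core.yaml is a wrapper around it

def parse_yaml_or_json(text):
    try:
        return yaml.safe_load(text)
    except yaml.YAMLError:
        return json.loads(text)  # raises ValueError on malformed input

print(parse_yaml_or_json('a: 1'))      # {'a': 1}
print(parse_yaml_or_json('{"a": 1}'))  # {'a': 1}, already handled by the YAML branch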
Example 4
def LoadResourceFile(input_fname):
  """Load an input resource file in either JSON or YAML format.

  Args:
    input_fname: The name of the file to parse.

  Returns:
    The Python object resulting from the decode.

  Raises:
    ResourceFileReadError: An error occurred attempting to read the input file.
    ResourceFileTypeError: The input file was an unsupported type.
    ResourceFileParseError: A parse error occurred.
  """
  try:
    input_text = files.GetFileContents(input_fname)
  except files.Error as e:
    raise ResourceFileReadError(str(e))

  file_type = GetResourceFileType(input_fname)
  if file_type == ResourceFileType.JSON:
    try:
      return json.loads(input_text)
    except ValueError as e:
      raise ResourceFileParseError('Error in resource file JSON: ' + str(e))
  elif file_type == ResourceFileType.YAML:
    try:
      return yaml.load(input_text)
    except yaml.YAMLParseError as e:
      raise ResourceFileParseError('Error in resource file YAML: ' + str(e))
  else:  # file_type == ResourceFileType.UNKNOWN
    raise ResourceFileTypeError(
        'Input file [{}] not of type YAML or JSON'.format(input_fname))
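GetResourceFileType is not shown in this listing; a plausible extension-based implementation would look like the following hypothetical sketch (not the SDK's actual code):

import os

class ResourceFileType(object):
    JSON, YAML, UNKNOWN = range(3)

def GetResourceFileType(input_fname):
    ext = os.path.splitext(input_fname)[1].lower()
    if ext == '.json':
        return ResourceFileType.JSON
    if ext in ('.yaml', '.yml'):
        return ResourceFileType.YAML
    return ResourceFileType.UNKNOWN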
Example 5
 def CreateLegacyAttestation(
     self,
     project_ref,
     normalized_artifact_url,
     signature,
     public_key_file,
 ):
   public_key = files.GetFileContents(public_key_file)
   provider_ref = binauthz_command_util.CreateProviderRefFromProjectRef(
       project_ref)
   note_id = binauthz_command_util.NoteId(
       artifact_url=normalized_artifact_url,
       public_key=public_key,
       signature=signature,
   )
   provider_note_ref = binauthz_command_util.ParseProviderNote(
       note_id=note_id,
       provider_ref=provider_ref,
   )
   return binauthz_api_util.ContainerAnalysisLegacyClient().PutSignature(
       occurrence_project_ref=project_ref,
       provider_ref=provider_ref,
       provider_note_ref=provider_note_ref,
       note_id=note_id,
       artifact_url=normalized_artifact_url,
       public_key=public_key,
       signature=signature,
   )
Example 6
def _GetYamlPath(source_dir, service_path, skip_files, gen_files):
  """Returns the yaml path, optionally updating gen_files.

  Args:
    source_dir: str, the absolute path to the root of the application directory.
    service_path: str, the absolute path to the service YAML file
    skip_files: appengine.api.Validation._RegexStr, the validated regex object
      from the service info file.
    gen_files: dict, the dict of files to generate. May be updated if a file
      needs to be generated.

  Returns:
    str, the relative path to the service YAML file that should be used for
      build.
  """
  if files.IsDirAncestorOf(source_dir, service_path):
    rel_path = os.path.relpath(service_path, start=source_dir)
    if not util.ShouldSkip(skip_files, rel_path):
      return rel_path
  yaml_contents = files.GetFileContents(service_path)
  # Use a checksum to ensure file uniqueness, not for security reasons.
  checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
  generated_path = '_app_{}.yaml'.format(checksum)
  gen_files[generated_path] = yaml_contents
  return generated_path
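The generated name is content-addressed: identical YAML contents always map to the same `_app_<checksum>.yaml` entry in gen_files, so repeated builds don't accumulate duplicates. A rough stdlib equivalent of the naming step, assuming SHA-256 (the usual default for the SDK's files.Checksum helper):

import hashlib

def generated_yaml_name(yaml_contents):
    # Content-addressed name: for uniqueness, not a security boundary.
    checksum = hashlib.sha256(yaml_contents.encode('utf-8')).hexdigest()
    return '_app_{}.yaml'.format(checksum)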
Example 7
def CredentialsFromAdcFile(filename):
    """Load credentials from given service account json file."""
    content = files.GetFileContents(filename)
    try:
        json_key = json.loads(content)
        return CredentialsFromAdcDict(json_key)
    except ValueError as e:
        raise BadCredentialFileException(
            'Could not read json file {0}: {1}'.format(filename, e))
Example 8
def _IsJsonFile(filename):
    """Check and validate if given filename is proper json file."""
    content = files.GetFileContents(filename)
    try:
        json.loads(content)
        return True
    except ValueError as e:
        if filename.endswith('.json'):
            raise auth_service_account.BadCredentialFileException(
                'Could not read json file {0}: {1}'.format(filename, e))
    return False
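Note the asymmetry: unparseable content is only fatal when the filename claims to be JSON; otherwise the function simply answers False. The content-sniffing core, in isolation:

import json

def looks_like_json(content):
    try:
        json.loads(content)
        return True
    except ValueError:
        return False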
Example 9
def GetFileAsMessage(path, message, client):
    """Reads a YAML or JSON object of type message from path (local or GCS).

  Args:
    path: A local or GCS path to an object specification in YAML or JSON format.
    message: The message type to be parsed from the file.
    client: The storage_v1 client to use.

  Returns:
    Object of type message, if successful.
  Raises:
    files.Error, genomics_exceptions.GenomicsInputFileError
  """
    if path.startswith(GCS_PREFIX):
        # Download remote file to a local temp file
        tf = tempfile.NamedTemporaryFile(delete=False)
        tf.close()

        bucket, obj = _SplitBucketAndObject(path)
        storage_messages = core_apis.GetMessagesModule('storage', 'v1')
        get_request = storage_messages.StorageObjectsGetRequest(bucket=bucket,
                                                                object=obj)
        try:
            download = transfer.Download.FromFile(tf.name, overwrite=True)
            client.objects.Get(get_request, download=download)
            del download  # Explicitly release the object so the stream is closed
        except apitools_exceptions.HttpError as e:
            raise genomics_exceptions.GenomicsInputFileError(
                'Unable to read remote file [{0}] due to [{1}]'.format(
                    path, str(e)))
        path = tf.name

    # Read the file.
    in_text = files.GetFileContents(path)
    if not in_text:
        raise genomics_exceptions.GenomicsInputFileError(
            'Empty file [{0}]'.format(path))

    # Parse it, first trying YAML then JSON.
    try:
        result = encoding.PyValueToMessage(message, yaml.load(in_text))
    except (ValueError, AttributeError, yaml.YAMLError):
        try:
            result = encoding.JsonToMessage(message, in_text)
        except (ValueError, DecodeError) as e:
            # ValueError is raised when JSON is badly formatted
            # DecodeError is raised when a tag is badly formatted (not Base64)
            raise genomics_exceptions.GenomicsInputFileError(
                'Pipeline file [{0}] is not properly formatted YAML or JSON '
                'due to [{1}]'.format(path, str(e)))
    return result
Example 10
    def Run(self, args):
        client, messages = util.GetClientAndMessages()
        response = client.projects_serviceAccounts.SignJwt(
            messages.IamProjectsServiceAccountsSignJwtRequest(
                name=iam_util.EmailToAccountResourceName(args.iam_account),
                signJwtRequest=messages.SignJwtRequest(
                    payload=files.GetFileContents(args.input, binary=False))))

        log.WriteToFileOrStdout(args.output,
                                content=response.signedJwt,
                                binary=True)
        log.status.Print(
            'signed jwt [{0}] as [{1}] for [{2}] using key [{3}]'.format(
                args.input, args.output, args.iam_account, response.keyId))
Example 11
def ReplicaConfiguration(sql_messages,
                         master_username,
                         master_password,
                         master_dump_file_path,
                         master_ca_certificate_path=None,
                         client_certificate_path=None,
                         client_key_path=None):
    """Generates the config for an external master replica.

  Args:
    sql_messages: module, The messages module that should be used.
    master_username: The username for connecting to the external instance.
    master_password: The password for connecting to the external instance.
    master_dump_file_path: ObjectReference, a wrapper for the URI of the Cloud
        Storage path containing the dumpfile to seed the replica with.
    master_ca_certificate_path: The path to the CA certificate PEM file.
    client_certificate_path: The path to the client certificate PEM file.
    client_key_path: The path to the client private key PEM file.

  Returns:
    sql_messages.MySqlReplicaConfiguration object.
  """
    mysql_replica_configuration = sql_messages.MySqlReplicaConfiguration(
        username=master_username,
        password=master_password,
        dumpFilePath=master_dump_file_path.ToUrl())
    if master_ca_certificate_path:
        mysql_replica_configuration.caCertificate = files.GetFileContents(
            master_ca_certificate_path)
    if client_certificate_path:
        mysql_replica_configuration.clientCertificate = files.GetFileContents(
            client_certificate_path)
    if client_key_path:
        mysql_replica_configuration.clientKey = files.GetFileContents(
            client_key_path)
    return sql_messages.ReplicaConfiguration(
        mysqlReplicaConfiguration=mysql_replica_configuration)
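Each optional PEM file is attached only when its path was supplied. The same pattern can be folded into a loop over (field, path) pairs; a generic sketch with illustrative names, not SDK code:

def attach_optional_files(config, field_paths):
    # Set each message field only if a path was actually provided.
    for field, path in field_paths:
        if path:
            with open(path) as f:
                setattr(config, field, f.read())
    return config

# attach_optional_files(mysql_replica_configuration,
#                       [('caCertificate', master_ca_certificate_path),
#                        ('clientCertificate', client_certificate_path),
#                        ('clientKey', client_key_path)])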
Example 12
def AddOptions(messages, options_file, type_provider):
    """Parse api options from the file and add them to type_provider.

  Args:
    messages: The API message to use.
    options_file: String path expression pointing to a type-provider options
        file.
    type_provider: A TypeProvider message on which the options will be set.
  Returns:
    The type_provider after applying changes.
  Raises:
    exceptions.ConfigError: the api options file couldn't be parsed as yaml
  """
    if not options_file:
        return type_provider

    file_contents = files.GetFileContents(options_file)
    yaml_content = None
    try:
        yaml_content = yaml.safe_load(file_contents)
    except yaml.YAMLError as exc:
        raise exceptions.ConfigError(
            'Could not load yaml file {0}: {1}'.format(options_file, exc))

    if yaml_content:
        if 'collectionOverrides' in yaml_content:
            type_provider.collectionOverrides = []

            for collection_override_data in yaml_content[
                    'collectionOverrides']:
                collection_override = messages.CollectionOverride(
                    collection=collection_override_data['collection'])

                if 'options' in collection_override_data:
                    collection_override.options = _OptionsFrom(
                        messages, collection_override_data['options'])

                type_provider.collectionOverrides.append(collection_override)

        if 'options' in yaml_content:
            type_provider.options = _OptionsFrom(messages,
                                                 yaml_content['options'])

        if 'credential' in yaml_content:
            type_provider.credential = _CredentialFrom(
                messages, yaml_content['credential'])

    return type_provider
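For reference, this is the shape of options file the function walks, with the three top-level keys the code checks for (field contents are illustrative):

import yaml

options_yaml = '''
collectionOverrides:
- collection: projects.instances
  options:
    inputMappings: []
options:
  inputMappings: []
credential:
  basicAuth:
    user: admin
    password: secret
'''
content = yaml.safe_load(options_yaml)
print(sorted(content.keys()))  # ['collectionOverrides', 'credential', 'options']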
Example 13
def ConstructUpdateMaskFromPolicy(policy_file_path):
    """Construct a FieldMask based on input policy.

  Args:
    policy_file_path: Path to the JSON or YAML IAM policy file.
  Returns:
    a FieldMask containing policy fields to be modified, based on which fields
    are present in the input file.
  """
    policy_file = files.GetFileContents(policy_file_path)
    # Since json is a subset of yaml, parse file as yaml.
    policy = yaml.load(policy_file)

    # The IAM update mask should only contain top level fields. Sort the fields
    # for testing purposes.
    return ','.join(sorted(policy.keys()))
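Because a FieldMask only needs top-level field names, the mask is simply the sorted keys of the parsed document. A worked example with PyYAML (policy contents illustrative):

import yaml

policy_text = '''
bindings:
- role: roles/viewer
  members:
  - user:alice@example.com
etag: BwUjMhCsNvY=
'''
policy = yaml.safe_load(policy_text)
print(','.join(sorted(policy.keys())))  # bindings,etag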
Example 14
def ReadFromFileOrStdin(path, binary):
    """Returns the contents of the specified file or stdin if path is '-'.

  Args:
    path: str, The path of the file to read.
    binary: bool, True to open the file in binary mode.

  Raises:
    Error: If the file cannot be read.

  Returns:
    The contents of the file.
  """
    if path == '-':
        return ReadStdin(binary=binary)
    return files.GetFileContents(path, binary=binary)
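The '-' convention is straightforward to reproduce with the stdlib; a minimal Python 3 sketch covering both text and binary modes:

import sys

def read_from_file_or_stdin(path, binary=False):
    if path == '-':
        stream = sys.stdin.buffer if binary else sys.stdin
        return stream.read()
    with open(path, 'rb' if binary else 'r') as f:
        return f.read()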
Example 15
  def FromFile(cls, file_path):
    """Create a KnownHosts object given a known_hosts_file.

    Args:
      file_path: str, path to the known_hosts_file.

    Returns:
      KnownHosts object corresponding to the file. If the file could not be
      opened, the KnownHosts object will have no entries.
    """
    try:
      known_hosts = files.GetFileContents(file_path).splitlines()
    except files.Error as e:
      known_hosts = []
      log.debug('SSH Known Hosts File [{0}] could not be opened: {1}'
                .format(file_path, e))
    return KnownHosts(known_hosts, file_path)
Example 16
def AddOptions(options_file, type_provider):
    """Parse api options from the file and add them to type_provider.

  Args:
    options_file: String path expression pointing to a type-provider options
        file.
    type_provider: A TypeProvider message on which the options will be set.
  Returns:
    The type_provider after applying changes.
  Raises:
    exceptions.ConfigError: the api options file couldn't be parsed as yaml
  """
    file_contents = files.GetFileContents(options_file)
    yaml_content = None
    try:
        yaml_content = yaml.safe_load(file_contents)
    except yaml.YAMLError as exc:
        raise exceptions.ConfigError(
            'Could not load yaml file {0}: {1}'.format(options_file, exc))
Example 17
def ConstructUpdateMaskFromPolicy(policy_file_path):
    """Construct a FieldMask based on input policy.

  Args:
    policy_file_path: Path to the JSON or YAML IAM policy file.
  Returns:
    a FieldMask containing policy fields to be modified, based on which fields
    are present in the input file.
  """
    policy_file = files.GetFileContents(policy_file_path)
    try:
        # Since json is a subset of yaml, parse file as yaml.
        policy = yaml.load(policy_file)
    except yaml.YAMLError as e:
        raise gcloud_exceptions.BadFileException(
            'Policy file {0} is not a properly formatted JSON or YAML policy file'
            '. {1}'.format(policy_file_path, str(e)))

    # The IAM update mask should only contain top level fields. Sort the fields
    # for testing purposes.
    return ','.join(sorted(policy.keys()))
Example 18
 def Run(self, args):
     project_ref = resources.REGISTRY.Parse(
         properties.VALUES.core.project.Get(required=True),
         collection='cloudresourcemanager.projects')
     normalized_artifact_url = binauthz_command_util.NormalizeArtifactUrl(
         args.artifact_url)
     public_key = files.GetFileContents(args.public_key_file)
     signature = files.GetFileOrStdinContents(args.signature_file)
     note_id = binauthz_command_util.NoteId(normalized_artifact_url,
                                            public_key, signature)
     provider_ref = binauthz_command_util.CreateProviderRefFromProjectRef(
         project_ref)
     provider_note_ref = binauthz_command_util.ParseProviderNote(
         note_id=note_id, provider_ref=provider_ref)
     return binauthz_api_util.ContainerAnalysisClient().PutSignature(
         occurrence_project_ref=project_ref,
         provider_ref=provider_ref,
         provider_note_ref=provider_note_ref,
         note_id=note_id,
         artifact_url=normalized_artifact_url,
         public_key=public_key,
         signature=signature)
Example 19
def _ValidateAndMergeArgInputs(args):
    """Turn args.inputs and args.inputs_from_file dicts into a single dict.

  Args:
    args: The parsed command-line arguments

  Returns:
    A dict that is the merge of args.inputs and args.inputs_from_file
  Raises:
    files.Error
  """

    # If no inputs from file, then no validation or merge needed
    if not args.inputs_from_file:
        return args.inputs

    # Initialize the merged dictionary
    arg_inputs = {}

    if args.inputs:
        # Validate args.inputs and args.inputs-from-file do not overlap
        overlap = set(args.inputs.keys()).intersection(
            set(args.inputs_from_file.keys()))
        if overlap:
            raise exceptions.GenomicsError(
                '--{0} and --{1} may not specify overlapping values: {2}'.
                format('inputs', 'inputs-from-file', ', '.join(overlap)))

        # Add the args.inputs
        arg_inputs.update(args.inputs)

    # Read up the inputs-from-file and add the values from the file
    for key, value in six.iteritems(args.inputs_from_file):
        arg_inputs[key] = files.GetFileContents(value)

    return arg_inputs
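The overlap guard is a plain set intersection over the two key sets. A worked example with illustrative keys:

inputs = {'sample': 'NA12878', 'ref': 'GRCh38'}
inputs_from_file = {'ref': 'ref_override.txt'}

overlap = set(inputs) & set(inputs_from_file)
print(overlap)  # {'ref'} -- the function above would reject this combination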
Example 20
def GetFileContents(*path_parts):
    """Returns file content at specified relative path wrt SDK root path."""
    return files.GetFileContents(os.path.join(SDK_ROOT, *path_parts)).strip()
Example 21
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    needs_dockerfile = _NeedsDockerfile(service, source_dir)
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime, needs_dockerfile)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    if needs_dockerfile and not use_runtime_builders:
        # The runtime builders generate the Dockerfile in the cloud, so we only
        # need to generate it locally when runtime builders are not in use.
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id, gcr_domain),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)

    if files.IsDirAncestorOf(source_dir, service.file):
        relative_yaml_path = os.path.relpath(service.file, source_dir)
    else:
        yaml_contents = files.GetFileContents(service.file)
        checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
        relative_yaml_path = checksum + '.yaml'
        gen_files[relative_yaml_path] = yaml_contents

    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_reference = runtime_builders.FromServiceInfo(
            service, source_dir)
        log.info('Using runtime builder [%s]',
                 builder_reference.build_file_uri)
        builder_reference.WarnIfDeprecated()
        yaml_path = posixpath.join(*relative_yaml_path.split(os.sep))
        build = builder_reference.LoadCloudBuild({
            '_OUTPUT_IMAGE':
            image.tagged_repo,
            '_GAE_APPLICATION_YAML_PATH':
            yaml_path
        })
        # TODO(b/37542869) Remove this hack once the API can take the gs:// path
        # as a runtime name.
        service.runtime = builder_reference.runtime
        service.parsed.SetEffectiveRuntime(builder_reference.runtime)
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
Example 22
    def Run(self, args):
        super(ConfigSSH, self).Run(args)

        ssh_config_file = os.path.expanduser(args.ssh_config_file
                                             or ssh.PER_USER_SSH_CONFIG_FILE)

        instances = None
        if args.remove:
            compute_section = ''
        else:
            self.EnsureSSHKeyIsInProject(getpass.getuser())
            instances = list(self.GetInstances())
            if instances:
                compute_section = _BuildComputeSection(
                    instances, self.ssh_key_file, ssh.KnownHosts.DEFAULT_PATH)
            else:
                compute_section = ''

        try:
            existing_content = files.GetFileContents(ssh_config_file)
        except files.Error as e:
            existing_content = ''
            log.debug('SSH Config File [{0}] could not be opened: {1}'.format(
                ssh_config_file, e))
        if existing_content:
            section_re = re.compile(_COMPUTE_SECTION_RE,
                                    flags=re.MULTILINE | re.DOTALL)
            match = section_re.search(existing_content)
            if not match:
                # There are no existing Compute Engine sections. If there is
                # at least one instance in the project (signified by
                # compute_section being non-empty), append the section to the
                # end of the existing configs; otherwise leave the content
                # unchanged, so nothing is written to the SSH config file.
                if compute_section:
                    # Ensures that there is a blank line between the existing
                    # configs and the Compute section.
                    if existing_content[-1] != '\n':
                        existing_content += '\n'
                    if existing_content[-2:] != '\n\n':
                        existing_content += '\n'
                    new_content = existing_content + compute_section

                else:
                    new_content = existing_content

            elif section_re.search(existing_content[match.end(1):]):
                # Multiple Compute Engine sections.
                raise exceptions.ToolException(
                    'Found more than one Google Compute Engine section in [{0}]. '
                    'You can either delete [{0}] and let this command recreate it for '
                    'you or you can manually delete all sections marked with '
                    '[{1}] and [{2}].'.format(ssh_config_file, _BEGIN_MARKER,
                                              _END_MARKER))
            else:
                # One Compute Engine section -- replace it.
                new_content = '{before}{new}{after}'.format(
                    before=existing_content[0:match.start(1)],
                    new=compute_section,
                    after=existing_content[match.end(1):])

        else:
            new_content = compute_section

        if args.dry_run:
            log.out.write(new_content or '')
            return

        if new_content != existing_content:
            if (os.path.exists(ssh_config_file)
                    and platforms.OperatingSystem.Current()
                    is not platforms.OperatingSystem.WINDOWS):
                ssh_config_perms = os.stat(ssh_config_file).st_mode
                # From `man 5 ssh_config`:
                #    this file must have strict permissions: read/write for the user,
                #    and not accessible by others.
                # We check that here:
                if not (ssh_config_perms & stat.S_IRWXU ==
                        (stat.S_IWUSR | stat.S_IRUSR)
                        and ssh_config_perms & stat.S_IWGRP == 0
                        and ssh_config_perms & stat.S_IWOTH == 0):
                    log.warn(
                        'Invalid permissions on [{0}]. Please change to match ssh '
                        'requirements (see man 5 ssh).'.format(ssh_config_file))
            # TODO(user): This write will not work very well if there is
            # a lot of write contention for the SSH config file. We should
            # add a function to do a better job at "atomic file writes".
            with files.OpenForWritingPrivate(ssh_config_file) as f:
                f.write(new_content)

        if compute_section:
            log.out.write(
                textwrap.dedent("""\
          You should now be able to use ssh/scp with your instances.
          For example, try running:

            $ ssh {alias}

          """.format(alias=_CreateAlias(instances[0]))))

        elif not instances and not args.remove:
            log.warn(
                'No host aliases were added to your SSH configs because you do not '
                'have any instances. Try running this command again after creating '
                'some instances.')
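Spelled out, the permission test requires the owner bits to be exactly read/write (no execute) and forbids group and other write; group or other read is tolerated. An equivalent standalone sketch:

import stat

def has_strict_ssh_config_perms(mode):
    return (mode & stat.S_IRWXU == (stat.S_IRUSR | stat.S_IWUSR)  # owner: rw only
            and mode & stat.S_IWGRP == 0                          # no group write
            and mode & stat.S_IWOTH == 0)                         # no other write

assert has_strict_ssh_config_perms(0o600)
assert has_strict_ssh_config_perms(0o644)      # group/other read still passes
assert not has_strict_ssh_config_perms(0o700)  # owner execute bit fails
assert not has_strict_ssh_config_perms(0o622)  # group write fails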
Example 23
    def Run(self, args):
        """See ssh_utils.BaseSSHCommand.Run."""
        super(ConfigSSH, self).Run(args)
        self.keys.EnsureKeysExist(args.force_key_file_overwrite,
                                  allow_passphrase=True)

        ssh_config_file = os.path.expanduser(args.ssh_config_file
                                             or ssh.PER_USER_SSH_CONFIG_FILE)

        instances = None
        try:
            existing_content = files.GetFileContents(ssh_config_file)
        except files.Error as e:
            existing_content = ''
            log.debug('SSH Config File [{0}] could not be opened: {1}'.format(
                ssh_config_file, e))

        if args.remove:
            compute_section = ''
            try:
                new_content = _RemoveComputeSection(existing_content)
            except MultipleComputeSectionsError:
                raise MultipleComputeSectionsError(ssh_config_file)
        else:
            self.EnsureSSHKeyIsInProject(
                ssh.GetDefaultSshUsername(warn_on_account_user=True))
            instances = list(self.GetInstances())
            if instances:
                compute_section = _BuildComputeSection(
                    instances, self.keys.key_file, ssh.KnownHosts.DEFAULT_PATH)
            else:
                compute_section = ''

        if existing_content and not args.remove:
            try:
                new_content = _MergeComputeSections(existing_content,
                                                    compute_section)
            except MultipleComputeSectionsError:
                raise MultipleComputeSectionsError(ssh_config_file)
        elif not existing_content:
            new_content = compute_section

        if args.dry_run:
            log.out.write(new_content or '')
            return

        if new_content != existing_content:
            if (os.path.exists(ssh_config_file)
                    and platforms.OperatingSystem.Current()
                    is not platforms.OperatingSystem.WINDOWS):
                ssh_config_perms = os.stat(ssh_config_file).st_mode
                # From `man 5 ssh_config`:
                #    this file must have strict permissions: read/write for the user,
                #    and not accessible by others.
                # We check that here:
                if not (ssh_config_perms & stat.S_IRWXU ==
                        (stat.S_IWUSR | stat.S_IRUSR)
                        and ssh_config_perms & stat.S_IWGRP == 0
                        and ssh_config_perms & stat.S_IWOTH == 0):
                    log.warn(
                        'Invalid permissions on [{0}]. Please change to match ssh '
                        'requirements (see man 5 ssh).'.format(ssh_config_file))
            # TODO(b/36050483): This write will not work very well if there is
            # a lot of write contention for the SSH config file. We should
            # add a function to do a better job at "atomic file writes".
            with files.OpenForWritingPrivate(ssh_config_file) as f:
                f.write(new_content)

        if compute_section:
            log.out.write(
                textwrap.dedent("""\
          You should now be able to use ssh/scp with your instances.
          For example, try running:

            $ ssh {alias}

          """.format(alias=_CreateAlias(instances[0]))))

        elif not instances and not args.remove:
            log.warn(
                'No host aliases were added to your SSH configs because you do not '
                'have any instances. Try running this command again after creating '
                'some instances.')
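_MergeComputeSections and _RemoveComputeSection are not shown in this listing; the underlying idea is a marker-delimited block that is replaced (or appended) wholesale, with duplicated markers treated as an error. A hypothetical sketch, with the marker text assumed for illustration:

import re

_BEGIN_MARKER = '# Google Compute Engine Section'   # assumed marker text
_END_MARKER = '# End of Google Compute Engine Section'
_SECTION_RE = re.compile(
    '(' + re.escape(_BEGIN_MARKER) + '.*?' + re.escape(_END_MARKER) + '\n)',
    flags=re.DOTALL)

def merge_compute_section(existing, section):
    matches = _SECTION_RE.findall(existing)
    if len(matches) > 1:
        raise ValueError('multiple Compute Engine sections')
    if not matches:
        if not existing:
            return section
        # Keep a blank line between the existing configs and the new section.
        return existing.rstrip('\n') + '\n\n' + section
    # Replace the single existing section in place.
    return _SECTION_RE.sub(lambda _match: section, existing)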