def Run(self, args):
        if (args.ciphertext_file == '-'
                and args.additional_authenticated_data_file == '-'):
            raise exceptions.InvalidArgumentException(
                '--ciphertext-file',
                '--ciphertext-file and --additional-authenticated-data-file cannot '
                'both read from stdin.')

        try:
            # The Encrypt API limits plaintext to 64 KiB, so valid ciphertext is
            # only slightly larger. Check proactively (but generously) to avoid
            # buffering and sending obviously oversized files to KMS.
            ciphertext = self._ReadFileOrStdin(args.ciphertext_file,
                                               max_bytes=2 * 65536)
        except files.Error as e:
            raise exceptions.BadFileException(
                'Failed to read ciphertext file [{0}]: {1}'.format(
                    args.ciphertext_file, e))

        aad = None
        if args.additional_authenticated_data_file:
            try:
                # The Encrypt API limits the AAD to 64KiB.
                aad = self._ReadFileOrStdin(
                    args.additional_authenticated_data_file, max_bytes=65536)
            except files.Error as e:
                raise exceptions.BadFileException(
                    'Failed to read additional authenticated data file [{0}]: {1}'
                    .format(args.additional_authenticated_data_file, e))

        crypto_key_ref = flags.ParseCryptoKeyName(args)

        client = cloudkms_base.GetClientInstance()
        messages = cloudkms_base.GetMessagesModule()

        req = messages.CloudkmsProjectsLocationsKeyRingsCryptoKeysDecryptRequest(
            name=crypto_key_ref.RelativeName())
        req.decryptRequest = messages.DecryptRequest(
            ciphertext=ciphertext, additionalAuthenticatedData=aad)

        resp = client.projects_locations_keyRings_cryptoKeys.Decrypt(req)

        try:
            log.WriteToFileOrStdout(args.plaintext_file,
                                    resp.plaintext,
                                    binary=True,
                                    overwrite=True)
        except files.Error as e:
            raise exceptions.BadFileException(e)
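Since both --ciphertext-file and --additional-authenticated-data-file accept '-' for stdin, the guard at the top rejects the one combination that cannot work: two readers of the same stream. A minimal standalone sketch of that check (the helper name is illustrative, not part of the SDK):

def _CheckSingleStdin(*paths):
    # Hypothetical guard: at most one input path may be '-' (stdin).
    if sum(1 for p in paths if p == '-') > 1:
        raise ValueError('At most one input can be read from stdin.')

_CheckSingleStdin('-', 'aad.bin')   # OK
# _CheckSingleStdin('-', '-')       # would raise ValueError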
Example 2
 def Run(self, args):
     if args.json_file:
         try:
             with io.open(args.json_file, 'rt') as f:
                 resources = json.load(f)
         except (IOError, ValueError) as e:
             raise exceptions.BadFileException(
                 'Cannot read [{}]: {}'.format(args.json_file, e))
     else:
         try:
             resources = json.load(sys.stdin)
         except (IOError, ValueError) as e:
             raise exceptions.BadFileException(
                 'Cannot read the standard input: {}'.format(e))
     return resources
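The same file-or-stdin pattern, reduced to a standalone sketch (a hypothetical helper, not the SDK's):

import io
import json
import sys

def _LoadJsonResources(path=None):
    # Hypothetical: parse JSON from the named file when a path is given,
    # otherwise from standard input.
    if path:
        with io.open(path, 'rt') as f:
            return json.load(f)
    return json.load(sys.stdin)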
Example 3
  def CopyFileFromGCS(self, source_obj_ref, local_path, overwrite=False):
    """Download a file from the given Cloud Storage bucket.

    Args:
      source_obj_ref: storage_util.ObjectReference, the path of the file on GCS
        to download.
      local_path: str, the path of the file to download to. Path must be on the
        local filesystem.
      overwrite: bool, whether or not to overwrite local_path if it already
        exists.

    Raises:
      BadFileException if the file download is not successful.
    """
    chunksize = self._GetChunkSize()
    download = transfer.Download.FromFile(
        local_path, chunksize=chunksize, overwrite=overwrite)
    download.bytes_http = http.Http(response_encoding=None)
    get_req = self.messages.StorageObjectsGetRequest(
        bucket=source_obj_ref.bucket,
        object=source_obj_ref.object)

    gcs_path = '{bucket}/{object_path}'.format(
        bucket=source_obj_ref.bucket, object_path=source_obj_ref.object,
    )

    log.info('Downloading [{gcs}] to [{local_file}]'.format(
        local_file=local_path, gcs=gcs_path))
    try:
      self.client.objects.Get(get_req, download=download)
      # Close the stream to release the file handle so we can check its contents
      download.stream.close()
      # When there's a download, Get() returns None so we Get() again to check
      # the file size.
      response = self.client.objects.Get(get_req)
    except api_exceptions.HttpError as err:
      raise exceptions.BadFileException(
          'Could not copy [{gcs}] to [{local_file}]. Please retry: {err}'
          .format(local_file=local_path, gcs=gcs_path,
                  err=http_exc.HttpException(err)))

    file_size = _GetFileSize(local_path)
    if response.size != file_size:
      log.debug('Download size: {0} bytes, but expected size is {1} '
                'bytes.'.format(file_size, response.size))
      raise exceptions.BadFileException(
          'Cloud Storage download failure. Downloaded file [{0}] does not '
          'match Cloud Storage object. Please retry.'.format(local_path))
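The second Get() exists only to fetch object metadata for the size check, which guards against truncated downloads. A minimal sketch of that validation, assuming os.stat in place of the SDK's _GetFileSize:

import os

def _CheckDownloadSize(local_path, expected_size):
    # Hypothetical: compare the on-disk size with the size GCS reports.
    actual = os.stat(local_path).st_size
    if actual != expected_size:
        raise IOError('Downloaded {0} bytes, expected {1}.'.format(
            actual, expected_size))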
Example 4
    def Run(self, args):
        client = cloudkms_base.GetClientInstance()
        messages = cloudkms_base.GetMessagesModule()
        import_job_name = flags.ParseImportJobName(args).RelativeName()

        if bool(args.rsa_aes_wrapped_key_file) == bool(args.target_key_file):
            raise exceptions.OneOfArgumentsRequiredException(
                ('--target-key-file', '--rsa-aes-wrapped-key-file'),
                'Either a pre-wrapped key or a key to be wrapped must be provided.')

        rsa_aes_wrapped_key_bytes = None
        if args.rsa_aes_wrapped_key_file:
            try:
                # This should be less than 64KiB.
                rsa_aes_wrapped_key_bytes = self._ReadFile(
                    args.rsa_aes_wrapped_key_file, max_bytes=65536)
            except files.Error as e:
                raise exceptions.BadFileException(
                    'Failed to read rsa_aes_wrapped_key_file [{0}]: {1}'.format(
                        args.rsa_aes_wrapped_key_file, e))

        if args.target_key_file:
            public_key_bytes = self._ReadOrFetchPublicKeyBytes(
                args, import_job_name)
            target_key_bytes = None
            try:
                # Raw target keys are small; 8 KiB is a generous upper bound.
                target_key_bytes = self._ReadFile(args.target_key_file,
                                                  max_bytes=8192)
            except files.Error as e:
                raise exceptions.BadFileException(
                    'Failed to read target key file [{0}]: {1}'.format(
                        args.target_key_file, e))
            rsa_aes_wrapped_key_bytes = self._CkmRsaAesKeyWrap(
                public_key_bytes, target_key_bytes)

        # Send the request to KMS.
        req = messages.CloudkmsProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsImportRequest(  # pylint: disable=line-too-long
            parent=flags.ParseCryptoKeyName(args).RelativeName())
        req.importCryptoKeyVersionRequest = messages.ImportCryptoKeyVersionRequest(
            algorithm=maps.ALGORITHM_MAPPER_FOR_IMPORT.GetEnumForChoice(
                args.algorithm),
            importJob=import_job_name,
            rsaAesWrappedKey=rsa_aes_wrapped_key_bytes)

        return client.projects_locations_keyRings_cryptoKeys_cryptoKeyVersions.Import(
            req)
Example 5
    def Run(self, args):
        req = self._CreateDecryptRequest(args)
        client = cloudkms_base.GetClientInstance()
        try:
            resp = client.projects_locations_keyRings_cryptoKeys.Decrypt(req)
        # Intercept INVALID_ARGUMENT errors related to checksum verification to
        # present a user-friendly message. All other errors are surfaced as-is.
        except apitools_exceptions.HttpBadRequestError as error:
            e2e_integrity.ProcessHttpBadRequestError(error)

        if self._PerformIntegrityVerification(args):
            self._VerifyResponseIntegrityFields(req, resp)

        try:
            if resp.plaintext is None:
                # The decrypted plaintext is empty; create an empty output file.
                with files.FileWriter(args.plaintext_file):
                    pass
                log.Print('Decrypted file is empty')
            else:
                log.WriteToFileOrStdout(args.plaintext_file,
                                        resp.plaintext,
                                        binary=True,
                                        overwrite=True)
        except files.Error as e:
            raise exceptions.BadFileException(e)
Example 6
    def _ParsePemChainFromFile(self, pem_chain_file):
        """Parses a pem chain from a file, splitting the leaf cert and chain.

    Args:
      pem_chain_file: file containing the pem_chain.

    Raises:
      exceptions.InvalidArgumentException if not enough certificates are
      included.

    Returns:
      A tuple with (leaf_cert, rest_of_chain)
    """
        try:
            pem_chain_input = files.ReadFileContents(pem_chain_file)
        except (files.Error, OSError, IOError):
            raise exceptions.BadFileException(
                "Could not read provided PEM chain file '{}'.".format(
                    pem_chain_file))

        certs = pem_utils.ValidateAndParsePemChain(pem_chain_input)
        if len(certs) < 2:
            raise exceptions.InvalidArgumentException(
                'pem-chain',
                'The pem_chain must include at least two certificates - the '
                'subordinate CA certificate and an issuer certificate.')

        return certs[0], certs[1:]
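pem_utils.ValidateAndParsePemChain does the real parsing and validation; as a rough sketch of the split it enables (simplified, with no per-certificate validation):

def _SplitPemChain(pem_text):
    # Simplified sketch: split concatenated PEM blocks on the BEGIN marker
    # and return (leaf, issuers). Assumes the input is well-formed.
    marker = '-----BEGIN CERTIFICATE-----'
    certs = [marker + block for block in pem_text.split(marker) if block.strip()]
    if len(certs) < 2:
        raise ValueError('Need a leaf certificate plus at least one issuer.')
    return certs[0], certs[1:]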
Example 7
  def CopyFileToGCS(self, bucket_ref, local_path, target_path):
    """Upload a file to the GCS results bucket using the storage API.

    Args:
      bucket_ref: storage_util.BucketReference, The user-specified bucket to
        download from.
      local_path: str, the path of the file to upload. File must be on the local
        filesystem.
      target_path: str, the path of the file on GCS.

    Returns:
      Object, the storage object that was copied to.

    Raises:
      BucketNotFoundError if the user-specified bucket does not exist.
      UploadError if the file upload is not successful.
      exceptions.BadFileException if the uploaded file size does not match the
          size of the local file.
    """
    file_size = _GetFileSize(local_path)
    src_obj = self.messages.Object(size=file_size)
    mime_type = _GetMimetype(local_path)

    chunksize = self._GetChunkSize()
    upload = transfer.Upload.FromFile(
        local_path, mime_type=mime_type, chunksize=chunksize)
    insert_req = self.messages.StorageObjectsInsertRequest(
        bucket=bucket_ref.bucket,
        name=target_path,
        object=src_obj)

    gcs_path = '{bucket}/{target_path}'.format(
        bucket=bucket_ref.bucket, target_path=target_path,
    )

    log.info('Uploading [{local_file}] to [{gcs}]'.format(local_file=local_path,
                                                          gcs=gcs_path))
    try:
      response = self.client.objects.Insert(insert_req, upload=upload)
    except api_exceptions.HttpNotFoundError:
      raise BucketNotFoundError(
          'Could not upload file: [{bucket}] bucket does not exist.'
          .format(bucket=bucket_ref.bucket))
    except api_exceptions.HttpError as err:
      log.debug('Could not upload file [{local_file}] to [{gcs}]: {e}'.format(
          local_file=local_path, gcs=gcs_path,
          e=http_exc.HttpException(err)))
      raise UploadError(
          '{code} Could not upload file [{local_file}] to [{gcs}]: {message}'
          .format(code=err.status_code, local_file=local_path, gcs=gcs_path,
                  message=http_exc.HttpException(
                      err, error_format='{status_message}')))

    if response.size != file_size:
      log.debug('Response size: {0} bytes, but local file is {1} bytes.'.format(
          response.size, file_size))
      raise exceptions.BadFileException(
          'Cloud storage upload failure. Uploaded file does not match local '
          'file: {0}. Please retry.'.format(local_path))
    return response
Example 8
 def testUploadException(self):
   self.copy_file_mock.side_effect = calliope_exceptions.BadFileException()
   with self.AssertRaisesExceptionMatches(
       exceptions.FileUploadError,
       r"Failed to upload files ['foo', '/tmp/bar', 'baz.txt'] "
       r"to 'gs://foo/bar/'."):
     storage_helpers.Upload(['foo', '/tmp/bar', 'baz.txt'], 'gs://foo/bar/')
Example 9
    def ReadObject(self, object_ref):
        """Read a file from the given Cloud Storage bucket.

    Args:
      object_ref: storage_util.ObjectReference, The object to read from.

    Raises:
      BadFileException if the file read is not successful.

    Returns:
      file-like object containing the data read.
    """
        data = io.BytesIO()
        chunksize = self._GetChunkSize()
        download = transfer.Download.FromStream(data, chunksize=chunksize)
        download.bytes_http = http.Http(response_encoding=None)
        get_req = self.messages.StorageObjectsGetRequest(
            bucket=object_ref.bucket, object=object_ref.object)

        log.info('Reading [%s]', object_ref)
        try:
            self.client.objects.Get(get_req, download=download)
        except api_exceptions.HttpError as err:
            raise exceptions.BadFileException(
                'Could not read [{object_}]. Please retry: {err}'.format(
                    object_=object_ref, err=http_exc.HttpException(err)))

        data.seek(0)
        return data
Example 10
def ParsePolicyFile(policy_file_path, policy_message_type):
    """Construct an IAM Policy protorpc.Message from a JSON or YAML formated file.

  Args:
    policy_file_path: Path to the JSON or YAML IAM policy file.
    policy_message_type: Policy message type to convert JSON or YAML to.
  Returns:
    a protorpc.Message of type policy_message_type filled in from the JSON or
    YAML policy file.
  Raises:
    BadFileException if the JSON or YAML file is malformed.
  """
    try:
        policy = ParseJsonPolicyFile(policy_file_path, policy_message_type)
    except gcloud_exceptions.BadFileException:
        try:
            policy = ParseYamlPolicyFile(policy_file_path, policy_message_type)
        except gcloud_exceptions.BadFileException:
            raise gcloud_exceptions.BadFileException(
                'Policy file {0} is not a properly formatted JSON or YAML '
                'policy file.'.format(policy_file_path))

    if not policy.etag:
        msg = ('The specified policy does not contain an "etag" field '
               'identifying a specific version to replace. Changing a '
               'policy without an "etag" can overwrite concurrent policy '
               'changes.')
        console_io.PromptContinue(message=msg,
                                  prompt_string='Replace existing policy',
                                  cancel_on_no=True)
    return policy
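The JSON-first, YAML-second fallback generalizes beyond IAM policies; a standalone sketch using PyYAML in place of the SDK's yaml wrapper:

import json

import yaml  # PyYAML, standing in for the SDK's yaml module.

def _ParseJsonOrYaml(text):
    # Try strict JSON first, then fall back to YAML (a superset of JSON).
    try:
        return json.loads(text)
    except ValueError:
        return yaml.safe_load(text)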
Example 11
def ParseYamlToRole(file_path, role_message_type):
    """Construct an IAM Role protorpc.Message from a Yaml formatted file.

  Args:
    file_path: Path to the Yaml IAM Role file.
    role_message_type: Role message type to convert Yaml to.
  Returns:
    a protorpc.Message of type role_message_type filled in from the Yaml
    role file.
  Raises:
    BadFileException if the Yaml file is malformed or does not exist.
  """
    role_to_parse = yaml.load_path(file_path)
    if 'stage' in role_to_parse:
        role_to_parse['stage'] = role_to_parse['stage'].upper()
    try:
        role = encoding.PyValueToMessage(role_message_type, role_to_parse)
    except (AttributeError) as e:
        # Raised when the YAML file is not properly formatted YAML role file.
        raise gcloud_exceptions.BadFileException(
            'Role file {0} is not a properly formatted YAML role file. {1}'.
            format(file_path, str(e)))
    except (apitools_messages.DecodeError, binascii.Error) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of role file {0} is not properly formatted. {1}'.format(
                file_path, str(e)))
    return role
Example 12
 def Run(self, args):
     client = cloudkms_base.GetClientInstance()
     messages = cloudkms_base.GetMessagesModule()
     version_ref = flags.ParseCryptoKeyVersionName(args)
     if not version_ref.Name():
         raise exceptions.InvalidArgumentException(
             'version', 'version id must be non-empty.')
     versions = client.projects_locations_keyRings_cryptoKeys_cryptoKeyVersions
     version = versions.Get(
         messages.
         CloudkmsProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsGetRequest(
             name=version_ref.RelativeName()))
     if (version.protectionLevel !=
             messages.CryptoKeyVersion.ProtectionLevelValueValuesEnum.HSM):
         raise exceptions.ToolException(
             'Certificate chains are only available for HSM key versions.')
     if (version.state == messages.CryptoKeyVersion.StateValueValuesEnum.
             PENDING_GENERATION):
         raise exceptions.ToolException(
             'Certificate chains are unavailable until the version is generated.'
         )
     try:
         log.WriteToFileOrStdout(
             args.output_file if args.output_file else '-',
             _GetCertificateChainPem(version.attestation.certChains,
                                     args.certificate_chain_type),
             overwrite=True,
             binary=False)
     except files.Error as e:
         raise exceptions.BadFileException(e)
Example 13
 def _ReadFile(self, path, max_bytes):
     data = files.ReadBinaryFileContents(path)
     if len(data) > max_bytes:
         raise exceptions.BadFileException(
             'The file is larger than the maximum size of {0} bytes.'.
             format(max_bytes))
     return data
Example 14
    def ReadObject(self, object_ref):
        """Read a file from the given Cloud Storage bucket.

    Args:
      object_ref: storage_util.ObjectReference, The object to read from.

    Raises:
      BadFileException if the file read is not successful.

    Returns:
      file-like object containing the data read.
    """
        data = cStringIO.StringIO()
        download = transfer.Download.FromStream(data)
        get_req = self.messages.StorageObjectsGetRequest(
            bucket=object_ref.bucket, object=object_ref.name)

        log.info('Reading [%s]', object_ref)
        try:
            self.client.objects.Get(get_req, download=download)
        except api_exceptions.HttpError as err:
            raise exceptions.BadFileException(
                'Could not read [{object_}]: {err}. Please retry.'.format(
                    object_=object_ref, err=err))

        data.seek(0)
        return data
Example 15
def _VerifySkaffoldIsInFolder(source, skaffold_file):
  """Checks that the specified source folder contains a skaffold configuration file."""
  path_to_skaffold = os.path.join(source, skaffold_file)
  if not os.path.exists(path_to_skaffold):
    raise c_exceptions.BadFileException(
        'Could not find skaffold file. File [{skaffold}] does not exist'.format(
            skaffold=path_to_skaffold))
Example 16
def _VerifySkaffoldIsInArchive(source, skaffold_file):
  """Checks that the specified source file is a readable archive with skaffold file present."""
  _, ext = os.path.splitext(source)
  if ext not in _ALLOWED_SOURCE_EXT:
    raise c_exceptions.BadFileException(
        'local file [{src}] is none of {exts}'.format(
            src=source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
  if not tarfile.is_tarfile(source):
    raise c_exceptions.BadFileException(
        'Specified source file is not a readable compressed file archive')
  with tarfile.open(source, mode='r:gz') as archive:
    try:
      archive.getmember(skaffold_file)
    except KeyError:
      raise c_exceptions.BadFileException(
          'Could not find skaffold file. File [{skaffold}] does not exist in source archive'
          .format(skaffold=skaffold_file))
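The archive check relies on tarfile.getmember raising KeyError for missing members; isolated, the pattern looks like this (hypothetical helper):

import tarfile

def _ArchiveHasMember(archive_path, member):
    # Hypothetical: True if the gzipped tarball contains `member`.
    with tarfile.open(archive_path, mode='r:gz') as archive:
        try:
            archive.getmember(member)
            return True
        except KeyError:
            return False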
Example 17
def _ExportExternalAccountKey(external_account_key, key_output_file):
    try:
        files.WriteFileContents(key_output_file, external_account_key)
    except (files.Error, OSError, IOError):
        raise exceptions.BadFileException(
            "Could not write external account key to '{}'.".format(
                key_output_file))
Example 18
def ParseReplicationFileContents(file_contents):
    """Reads replication policy file contents and returns its data.

  Reads the contents of a json or yaml replication policy file which conforms to
  https://cloud.google.com/secret-manager/docs/reference/rest/v1/projects.secrets#replication
  and returns data needed to create a Secret with that policy. If the file
  doesn't conform to the expected format, a BadFileException is raised.

  For Secrets with an automatic policy, locations is empty and keys has
  either 0 or 1 entry depending on whether the policy includes CMEK. For
  Secrets with a user-managed policy, the number of keys returned is either 0
  or equal to the number of locations returned, with the Nth key corresponding
  to the Nth location.

  Args:
      file_contents (str): The unvalidated contents of the replication file.

  Returns:
      result (str): Either "user-managed" or "automatic".
      locations (list): Locations that are part of the user-managed replication
      keys (list): list of kms keys to be used for each replica.
  """
    try:
        replication_policy = json.loads(file_contents)
        return _ParseReplicationDict(replication_policy)
    except ValueError:
        # Assume that this is yaml.
        pass
    try:
        replication_policy = yaml.load(file_contents)
        return _ParseReplicationDict(replication_policy)
    except yaml.YAMLParseError:
        raise exceptions.BadFileException(
            'Failed to parse replication policy file as json or yaml.')
Example 19
def ParseYamlOrJsonPolicyFile(policy_file_path, policy_message_type):
    """Create an IAM Policy protorpc.Message from a YAML or JSON formatted file.

  Returns the parsed policy object and FieldMask derived from input dict.
  Args:
    policy_file_path: Path to the YAML or JSON IAM policy file.
    policy_message_type: Policy message type to convert YAML to.
  Returns:
    a tuple of (policy, updateMask) where policy is a protorpc.Message of type
    policy_message_type filled in from the JSON or YAML policy file and
    updateMask is a FieldMask containing policy fields to be modified, based on
    which fields are present in the input file.
  Raises:
    BadFileException if the YAML or JSON file is malformed.
    IamEtagReadError if the etag is badly formatted.
  """
    policy_to_parse = yaml.load_path(policy_file_path)
    try:
        policy = encoding.PyValueToMessage(policy_message_type,
                                           policy_to_parse)
        update_mask = ','.join(sorted(policy_to_parse.keys()))
    except (AttributeError) as e:
        # Raised when the input file is not properly formatted YAML policy file.
        raise gcloud_exceptions.BadFileException(
            'Policy file [{0}] is not a properly formatted YAML or JSON '
            'policy file. {1}'.format(policy_file_path, str(e)))
    except (apitools_messages.DecodeError, binascii.Error) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of policy file [{0}] is not properly formatted. {1}'.
            format(policy_file_path, str(e)))
    return (policy, update_mask)
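The update mask is derived directly from the top-level keys of the parsed file; a worked example with illustrative values:

policy_dict = {'bindings': [], 'etag': 'BwWKmjvelug='}
update_mask = ','.join(sorted(policy_dict.keys()))
# update_mask == 'bindings,etag'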
Example 20
    def Run(self, args):
        # Get the security policy.
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        ref = self.SECURITY_POLICY_ARG.ResolveAsResource(
            args, holder.resources)

        requests = []
        security_policy = client.SecurityPolicy(ref,
                                                compute_client=holder.client)
        requests.extend(security_policy.Describe(only_generate_request=True))
        resources = holder.client.MakeRequests(requests)

        # Export the security policy.
        try:
            with open(args.file_name, 'w') as export_file:
                if args.file_format == 'json':
                    security_policies_utils.WriteToFile(
                        export_file, resources[0], 'json')
                else:
                    security_policies_utils.WriteToFile(
                        export_file, resources[0], 'yaml')
        except EnvironmentError as exp:
            msg = 'Unable to export security policy to file [{0}]: {1}'.format(
                args.file_name, exp)
            raise exceptions.BadFileException(msg)

        log.status.Print('Exported security policy to [{0}].'.format(
            args.file_name))
Example 21
def LoadTPUResourceSpecs(custom_help=None):
    """Read Yaml resource file and return a dict mapping name to resource spec."""
    resource_file_contents = pkg_resources.GetResource(TPU_YAML_RESOURCE_PATH,
                                                       'resources.yaml')
    if not resource_file_contents:
        raise calliope_exceptions.BadFileException(
            'Resources not found in path [{}]'.format(TPU_YAML_RESOURCE_PATH))

    resource_dict = yaml.load(resource_file_contents)
    resource_specs = []
    for resource_name in TPU_YAML_SPEC_TEMPLATE:
        spec = resource_dict.get(resource_name, None)
        if not spec:
            raise ValueError(
                'Resource spec [{}] not found in resource spec {}.yaml'.format(
                    resource_name, TPU_YAML_RESOURCE_PATH))

        # Don't modify template
        temp_spec = copy.deepcopy(TPU_YAML_SPEC_TEMPLATE[resource_name])

        temp_spec['spec'] = spec
        if custom_help and custom_help.get(resource_name):
            temp_spec['help_text'] = custom_help[resource_name]
        resource_specs.append(
            resource_arg_schema.YAMLResourceArgument.FromData(temp_spec))
    return resource_specs
Example 22
 def _ReadFileOrStdin(self, path, max_bytes):
     data = console_io.ReadFromFileOrStdin(path, binary=True)
     if len(data) > max_bytes:
         raise exceptions.BadFileException(
             'The file [{0}] is larger than the maximum size of {1} bytes.'.
             format(path, max_bytes))
     return data
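Without the SDK's console_io, the same helper might be sketched as follows (Python 3, hypothetical standalone version):

import sys

def _ReadFileOrStdin(path, max_bytes):
    # '-' selects stdin; all reads are binary, capped at max_bytes.
    if path == '-':
        data = sys.stdin.buffer.read()
    else:
        with open(path, 'rb') as f:
            data = f.read()
    if len(data) > max_bytes:
        raise ValueError('The file [{0}] is larger than the maximum size of '
                         '{1} bytes.'.format(path, max_bytes))
    return data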
Example 23
def ParseJsonPolicyFile(policy_file_path, policy_message_type):
    """Construct an IAM Policy protorpc.Message from a JSON formated file.

  Args:
    policy_file_path: Path to the JSON IAM policy file.
    policy_message_type: Policy message type to convert JSON to.
  Returns:
    a protorpc.Message of type policy_message_type filled in from the JSON
    policy file.
  Raises:
    BadFileException if the JSON file is malformed.
    IamEtagReadError if the etag is badly formatted.
  """
    try:
        with open(policy_file_path) as policy_file:
            policy_json = policy_file.read()
    except EnvironmentError:
        # EnvironmentError is the parent of IOError, OSError and WindowsError.
        # Raised when file does not exist or can't be opened/read.
        raise core_exceptions.Error(
            'Unable to read policy file {0}'.format(policy_file_path))

    try:
        policy = encoding.JsonToMessage(policy_message_type, policy_json)
    except (ValueError) as e:
        # ValueError is raised when JSON is badly formatted
        raise gcloud_exceptions.BadFileException(
            'Policy file {0} is not a properly formatted JSON policy file. {1}'
            .format(policy_file_path, str(e)))
    except (apitools_messages.DecodeError) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of policy file {0} is not properly formatted. {1}'.
            format(policy_file_path, str(e)))
    return policy
Example 24
def GetArgsFromArgFile(argspec, all_test_args_set):
    """Loads a group of test args from an optional user-supplied arg file.

  Args:
    argspec: string containing an ARG_FILE:ARG_GROUP_NAME pair, where ARG_FILE
      is the path to a file containing groups of test arguments in yaml format,
      and ARG_GROUP_NAME is a yaml object name of a group of arg:value pairs.
    all_test_args_set: a set of strings for every possible gcloud-test argument
      name regardless of test type. Used for validation.

  Returns:
    A {str:str} dict created from the file which maps arg names to arg values.

  Raises:
    BadFileException: the YAML parser encountered an I/O error or syntax error
      while reading the arg-file.
    ToolException: an argument name was not a valid gcloud test arg.
    InvalidArgException: an argument has an invalid value or no value.
  """
    if argspec is None:
        return {}

    arg_file, group_name = _SplitArgFileAndGroup(argspec)
    try:
        all_arg_groups = _ReadArgGroupsFromFile(arg_file)
    except IOError as err:
        raise exceptions.BadFileException(
            'Error reading argument file [{f}]: {e}'.format(f=arg_file, e=err))
    _ValidateArgGroupNames(all_arg_groups.keys())

    args_from_file = {}
    _MergeArgGroupIntoArgs(args_from_file, group_name, all_arg_groups,
                           all_test_args_set)
    log.info('Args loaded from file: ' + str(args_from_file))
    return args_from_file
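An argspec pairs a YAML file with a group name, e.g. 'args.yaml:smoke-tests'. A naive split on the last colon (the real _SplitArgFileAndGroup handles more path forms) might look like:

def _NaiveSplitArgspec(argspec):
    # Simplified sketch: split on the rightmost colon so colons inside
    # paths such as 'gs://bucket/args.yaml' survive.
    arg_file, _, group_name = argspec.rpartition(':')
    return arg_file, group_name

print(_NaiveSplitArgspec('gs://bucket/args.yaml:smoke-tests'))
# ('gs://bucket/args.yaml', 'smoke-tests')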
Example 25
def AddQuotaProjectToADC(quota_project):
    """Adds the quota project to the existing ADC file.

  Quota project is only added to ADC when the credentials have the
  "serviceusage.services.use" permission on the project.

  Args:
    quota_project: str, The project id of a valid GCP project to add to ADC.

  Raises:
    MissingPermissionOnQuotaProjectError: If the credentials do not have the
      "serviceusage.services.use" permission.
  """
    AssertADCExists()
    if not ADCIsUserAccount():
        raise c_exc.BadFileException(
            'The application default credentials are not user credentials, quota '
            'project cannot be added.')
    if not AdcHasGivenPermissionOnProject(
            quota_project, permissions=[SERVICEUSAGE_PERMISSION]):
        raise MissingPermissionOnQuotaProjectError(
            'Cannot add the project "{}" to application default credentials (ADC) '
            'as a quota project because the account in ADC does not have the '
            '"{}" permission on this project.'.format(quota_project,
                                                      SERVICEUSAGE_PERMISSION))
    credentials = client.GoogleCredentials.from_stream(config.ADCFilePath())
    adc_path = c_creds.ADC(credentials).DumpExtendedADCToFile(
        quota_project=quota_project)
    LogADCIsWritten(adc_path)
    LogQuotaProjectAdded(quota_project)
Example 26
 def _WritePemChain(self, pem_cert, issuing_chain, cert_file):
     try:
         pem_chain = [pem_cert] + issuing_chain
         files.WriteFileContents(cert_file, '\n'.join(pem_chain))
     except (files.Error, OSError, IOError):
         raise exceptions.BadFileException(
             "Could not write certificate to '{}'.".format(cert_file))
Example 27
def _ReadArgGroupsFromFile(arg_file):
    """Collects all the arg groups defined in the yaml file into a dictionary.

  Each dictionary key is an arg-group name whose corresponding value is a nested
  dictionary containing arg-name: arg-value pairs defined in that group.

  Args:
    arg_file: str, the name of the YAML argument file to open and parse.

  Returns:
    A dict containing all arg-groups found in the arg_file.

  Raises:
    BadFileException: the yaml package encountered a ScannerError.
  """
    # TODO(user): add support for reading arg files in GCS.
    # TODO(user): add support for reading from stdin.
    with open(arg_file, 'r') as data:
        yaml_generator = yaml.safe_load_all(data)
        all_groups = {}
        try:
            for d in yaml_generator:
                if d is None:
                    log.warning('Ignoring empty yaml document.')
                elif isinstance(d, dict):
                    all_groups.update(d)
                else:
                    raise yaml.scanner.ScannerError(
                        '[{0}] is not a valid argument group.'.format(str(d)))
        except yaml.scanner.ScannerError as error:
            raise exceptions.BadFileException(
                'Error parsing YAML file [{0}]: {1}'.format(
                    arg_file, str(error)))
    return all_groups
Example 28
def ParseYamlorJsonPolicyFile(policy_file_path, policy_message_type):
    """Create an IAM Policy protorpc.Message from a YAML or JSON formatted file.

  Args:
    policy_file_path: Path to the YAML or JSON IAM policy file.
    policy_message_type: Policy message type to convert YAML to.
  Returns:
    a protorpc.Message of type policy_message_type filled in from the input
    policy file.
  Raises:
    BadFileException if the YAML or JSON file is malformed.
    IamEtagReadError if the etag is badly formatted.
  """
    policy_to_parse = yaml.load_path(policy_file_path)
    try:
        policy = encoding.PyValueToMessage(policy_message_type,
                                           policy_to_parse)
    except (AttributeError) as e:
        # Raised when the input file is not properly formatted YAML policy file.
        raise gcloud_exceptions.BadFileException(
            'Policy file [{0}] is not a properly formatted YAML or JSON '
            'policy file. {1}'.format(policy_file_path, str(e)))
    except (apitools_messages.DecodeError) as e:
        # DecodeError is raised when etag is badly formatted (not proper Base64)
        raise IamEtagReadError(
            'The etag of policy file [{0}] is not properly formatted. {1}'.
            format(policy_file_path, str(e)))
    return policy
Example 29
def _SplitBucketAndObject(gcs_path):
  """Split a GCS path into bucket & object tokens, or raise BadFileException."""
  tokens = gcs_path[len(GCS_PREFIX):].strip('/').split('/', 1)
  if len(tokens) != 2:
    raise calliope_exceptions.BadFileException(
        '[{0}] is not a valid Google Cloud Storage path'.format(gcs_path))
  return tokens
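A worked call with an illustrative path:

GCS_PREFIX = 'gs://'
path = 'gs://my-bucket/logs/build.log'
tokens = path[len(GCS_PREFIX):].strip('/').split('/', 1)
# tokens == ['my-bucket', 'logs/build.log']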
Example 30
def _ReadArgGroupsFromFile(arg_file):
    """Collects all the arg groups defined in the yaml file into a dictionary.

  Each dictionary key is an arg-group name whose corresponding value is a nested
  dictionary containing arg-name: arg-value pairs defined in that group.

  Args:
    arg_file: str, the name of the YAML argument file to open and parse.

  Returns:
    A dict containing all arg-groups found in the arg_file.

  Raises:
    yaml.Error: If the YAML file could not be read or parsed.
    BadFileException: If the contents of the file are not valid.
  """
    all_groups = {}
    for d in yaml.load_all_path(arg_file):
        if d is None:
            log.warning('Ignoring empty yaml document.')
        elif isinstance(d, dict):
            all_groups.update(d)
        else:
            raise calliope_exceptions.BadFileException(
                'Failed to parse YAML file [{}]: [{}] is not a valid argument '
                'group.'.format(arg_file, str(d)))
    return all_groups
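For illustration, the multi-document merge behaves like this (PyYAML standing in for the SDK's yaml wrapper, with load_all_path replaced by safe_load_all over a string):

import yaml  # PyYAML, as an illustration.

text = 'group-a:\n  flag: 1\n---\ngroup-b:\n  flag: 2\n'
all_groups = {}
for doc in yaml.safe_load_all(text):
    if isinstance(doc, dict):
        all_groups.update(doc)
# all_groups == {'group-a': {'flag': 1}, 'group-b': {'flag': 2}}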