Example #1
  def Run(self, args):
    project_ref = resources.REGISTRY.Parse(
        properties.VALUES.core.project.Get(required=True),
        collection='cloudresourcemanager.projects',
    )
    normalized_artifact_url = binauthz_command_util.NormalizeArtifactUrl(
        args.artifact_url)
    signature = console_io.ReadFromFileOrStdin(args.signature_file, binary=True)
    if args.payload_file:
      payload = files.ReadBinaryFileContents(args.payload_file)
    else:
      payload = binauthz_command_util.MakeSignaturePayload(
          normalized_artifact_url)

    attestor_ref = args.CONCEPTS.attestor.Parse()
    api_version = apis.GetApiVersion(self.ReleaseTrack())
    attestor = attestors.Client(api_version).Get(attestor_ref)
    # TODO(b/79709480): Add other types of attestors if/when supported.
    note_ref = resources.REGISTRY.ParseResourceId(
        'containeranalysis.projects.notes',
        attestor.userOwnedDrydockNote.noteReference, {})

    return containeranalysis.Client().CreateGenericAttestationOccurrence(
        project_ref=project_ref,
        note_ref=note_ref,
        artifact_url=normalized_artifact_url,
        public_key_id=args.public_key_id,
        signature=signature,
        plaintext=payload,
    )
Example #2
 def _ReadFileOrStdin(self, path, max_bytes):
     data = console_io.ReadFromFileOrStdin(path, binary=True)
     if len(data) > max_bytes:
         raise exceptions.BadFileException(
             'The file [{0}] is larger than the maximum size of {1} bytes.'.
             format(path, max_bytes))
     return data
Example #3
    def Run(self, args):
        """Runs the command.

    Args:
      args: argparse.Namespace, An object that contains the values for the
        arguments specified in the .Args() method.

    Returns:
      A response object returned by rpc call Validate.
    """
        project = properties.VALUES.core.project.GetOrFail()
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        client = holder.client

        # Import UrlMap to be verified
        data = console_io.ReadFromFileOrStdin(args.source, binary=False)
        try:
            url_map = export_util.Import(message_type=client.messages.UrlMap,
                                         stream=data,
                                         schema_path=_GetSchemaPath(
                                             self.ReleaseTrack()))
        except yaml_validator.ValidationError as e:
            raise exceptions.ToolException(str(e))

        # Send UrlMap.validate request
        if args.region is not None:
            return _SendRegionalRequest(client, project, args.region, url_map)
        return _SendGlobalRequest(client, project, url_map)
Example #4
    def Run(self, args):
        project_ref = resources.REGISTRY.Parse(
            properties.VALUES.core.project.Get(required=True),
            collection='cloudresourcemanager.projects',
        )
        normalized_artifact_url = binauthz_command_util.NormalizeArtifactUrl(
            args.artifact_url)
        signature = console_io.ReadFromFileOrStdin(args.signature_file,
                                                   binary=False)

        attestor_ref = args.CONCEPTS.attestor.Parse()
        api_version = apis.GetApiVersion(self.ReleaseTrack())
        attestor = authorities.Client(api_version).Get(attestor_ref)
        # TODO(b/79709480): Add other types of attestors if/when supported.
        note_ref = resources.REGISTRY.ParseResourceId(
            'containeranalysis.projects.notes',
            attestor.userOwnedDrydockNote.noteReference, {})

        client = binauthz_api_util.ContainerAnalysisClient()
        return client.CreateAttestationOccurrence(
            project_ref=project_ref,
            note_ref=note_ref,
            artifact_url=normalized_artifact_url,
            pgp_key_fingerprint=args.pgp_key_fingerprint,
            signature=signature,
        )
Example #5
def ReadFileOrStdin(path, max_bytes=None):
    """Read data from the given file path or from stdin.

  This is similar to the Cloud SDK built-in ReadFromFileOrStdin, except that it
  limits the total size of the file and returns None when given a None path.
  This makes the API in command surfaces a bit cleaner.

  Args:
      path (str): path to the file on disk or "-" for stdin
      max_bytes (int): maximum number of bytes

  Returns:
      result (bytes): result of reading the file
  """
    if not path:
        return None

    max_bytes = max_bytes or DEFAULT_MAX_BYTES

    try:
        data = console_io.ReadFromFileOrStdin(path, binary=True)
        if len(data) > max_bytes:
            raise exceptions.BadFileException(
                'The file [{path}] is larger than the maximum size of {max_bytes} '
                'bytes.'.format(path=path, max_bytes=max_bytes))
        return data
    except files.Error as e:
        raise exceptions.BadFileException(
            'Failed to read file [{path}]: {e}'.format(path=path, e=e))
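A brief usage sketch for the helper above, grounded in its docstring: '-' reads from stdin, a None path returns None, and reads are capped at max_bytes (DEFAULT_MAX_BYTES when unset). The file name and limit below are illustrative.

# Assumes ReadFileOrStdin from Example #5 is in scope.
data = ReadFileOrStdin('policy.yaml', max_bytes=1024 * 1024)  # cap at 1 MiB
stdin_data = ReadFileOrStdin('-')  # '-' reads from stdin with the default cap
nothing = ReadFileOrStdin(None)    # a None path short-circuits to None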
Example #6
  def _ParseMysqlSourceConfig(self, mysql_source_config_file, release_track):
    """Parses a mysql_source_config into the MysqlSourceConfig message."""
    data = console_io.ReadFromFileOrStdin(
        mysql_source_config_file, binary=False)
    try:
      mysql_source_config_head_data = yaml.load(data)
    except Exception as e:
      raise ds_exceptions.ParseError('Cannot parse YAML:[{0}]'.format(e))

    mysql_source_config_data_object = mysql_source_config_head_data.get(
        'mysql_source_config')
    mysql_rdbms_data = (mysql_source_config_data_object or
                        mysql_source_config_head_data)

    include_objects_raw = mysql_rdbms_data.get(
        util.GetRDBMSV1alpha1ToV1FieldName('allowlist', release_track), {})
    include_objects_data = util.ParseMysqlSchemasListToMysqlRdbmsMessage(
        self._messages, include_objects_raw, release_track)

    exclude_objects_raw = mysql_rdbms_data.get(
        util.GetRDBMSV1alpha1ToV1FieldName('rejectlist', release_track), {})
    exclude_objects_data = util.ParseMysqlSchemasListToMysqlRdbmsMessage(
        self._messages, exclude_objects_raw, release_track)

    mysql_source_config_msg = self._messages.MysqlSourceConfig(
        includeObjects=include_objects_data,
        excludeObjects=exclude_objects_data)
    return mysql_source_config_msg
Example #7
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    backend_service_ref = (
        flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.ResolveAsResource(
            args,
            holder.resources,
            scope_lister=compute_flags.GetDefaultScopeLister(client)))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
      backend_service = export_util.Import(
          message_type=client.messages.BackendService,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ToolException(str(e))

    # Get existing backend service.
    try:
      backend_service_old = backend_services_utils.SendGetRequest(
          client, backend_service_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Backend service does not exist, create a new one.
      return self.SendInsertRequest(client, backend_service_ref,
                                    backend_service)

    # No change, do not send requests to server.
    if backend_service_old == backend_service:
      return

    console_io.PromptContinue(
        message=('Backend Service [{0}] will be overwritten.').format(
            backend_service_ref.Name()),
        cancel_on_no=True)

    # populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    backend_service.id = backend_service_old.id
    backend_service.fingerprint = backend_service_old.fingerprint

    # Unspecified fields are assumed to be cleared.
    cleared_fields = []
    if backend_service.securitySettings is None:
      cleared_fields.append('securitySettings')
    if backend_service.localityLbPolicy is None:
      cleared_fields.append('localityLbPolicy')
    if backend_service.circuitBreakers is None:
      cleared_fields.append('circuitBreakers')
    if backend_service.consistentHash is None:
      cleared_fields.append('consistentHash')
    if backend_service.outlierDetection is None:
      cleared_fields.append('outlierDetection')

    with client.apitools_client.IncludeFields(cleared_fields):
      return self.SendPatchRequest(client, backend_service_ref, backend_service)
Example #8
def ReadInstanceFromArgs(path):
  """Reads the instance from the given file path ('-' for stdin).

  Args:
    path: str or None, a path to a file ('-' for stdin) containing the JSON
      body.

  Returns:
    An instance.

  Raises:
    InvalidInstancesFileError: If the input file is invalid (invalid format or
        contains too many/zero instances), or an improper combination of input
        files was given.
  """
  data = console_io.ReadFromFileOrStdin(path, binary=True)
  with io.BytesIO(data) as f:
    try:
      instance = yaml.load(f)
    except ValueError:
      raise errors.InvalidInstancesFileError(
          'Input instance is not in JSON format. '
          'See `gcloud ai model-monitoring-jobs create --help` for details.')

    if not isinstance(instance, dict):
      raise errors.InvalidInstancesFileError(
          'Input instance is not in JSON format. '
          'See `gcloud ai model-monitoring-jobs create --help` for details.')

    return instance
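A hedged usage sketch for ReadInstanceFromArgs above: the docstring expects a JSON body holding a single instance, so a call looks like the following (the path and contents are illustrative).

# Given a file instance.json containing: {"feature": 1}
instance = ReadInstanceFromArgs('instance.json')  # -> {'feature': 1}
from_stdin = ReadInstanceFromArgs('-')  # '-' reads the JSON body from stdin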
Example #9
 def Run(self, args):
     dataproc = dp.Dataproc(self.ReleaseTrack())
     data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
     cluster = export_util.Import(message_type=dataproc.messages.Cluster,
                                  stream=data)
     return clusters.CreateCluster(dataproc, cluster, args.async_,
                                   args.timeout)
Example #10
  def Run(self, args):
    try:
      ciphertext = console_io.ReadFromFileOrStdin(
          args.ciphertext_file, binary=True)
    except files.Error as e:
      raise exceptions.BadFileException(
          'Failed to read ciphertext file [{0}]: {1}'.format(
              args.ciphertext_file, e))

    client = cloudkms_base.GetClientInstance()
    messages = cloudkms_base.GetMessagesModule()
    crypto_key_ref = flags.ParseCryptoKeyVersionName(args)

    req = messages.CloudkmsProjectsLocationsKeyRingsCryptoKeysCryptoKeyVersionsAsymmetricDecryptRequest(  # pylint: disable=line-too-long
        name=crypto_key_ref.RelativeName())
    req.asymmetricDecryptRequest = messages.AsymmetricDecryptRequest(
        ciphertext=ciphertext)

    resp = (
        client.projects_locations_keyRings_cryptoKeys_cryptoKeyVersions.
        AsymmetricDecrypt(req))

    try:
      log.WriteToFileOrStdout(
          args.plaintext_file,
          resp.plaintext or '',
          overwrite=True,
          binary=True,
          private=True)
    except files.Error as e:
      raise exceptions.BadFileException(e)
Example #11
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Generate uuid for request.
        instance_id = uuid.uuid4().hex
        regions_ref = dp_util.ParseRegion(dataproc)

        data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
        template = export_util.Import(message_type=msgs.WorkflowTemplate,
                                      stream=data)

        # Send instantiate inline request.
        request = \
          msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
              parent=regions_ref.RelativeName(),
              workflowTemplate=template)

        if dataproc.api_version == 'v1':
            # Deprecated field in v1beta2
            request.requestId = instance_id
        else:
            # new field not in v1
            request.instanceId = instance_id

        operation = \
          dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
              request)
        if args.async_:
            log.status.Print('Instantiating with operation [{0}].'.format(
                operation.name))
            return
        operation = dp_util.WaitForWorkflowTemplateOperation(
            dataproc, operation)
        return operation
Example #12
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Generate uuid for request.
        instance_id = uuid.uuid4().hex
        regions_ref = dp_util.ParseRegion(dataproc)
        # Read template from YAML file and validate it using a schema.
        data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
        template = export_util.Import(message_type=msgs.WorkflowTemplate,
                                      stream=data,
                                      schema_path=export_util.GetSchemaPath(
                                          'dataproc',
                                          api_version='v1beta2',
                                          message_name='WorkflowTemplate'))

        # Send instantiate inline request.
        request = \
          msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
              instanceId=instance_id,
              parent=regions_ref.RelativeName(),
              workflowTemplate=template)
        operation = \
          dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
              request)
        if args.async_:
            log.status.Print('Instantiating with operation [{0}].'.format(
                operation.name))
            return
        operation = dp_util.WaitForWorkflowTemplateOperation(
            dataproc, operation)
        return operation
Example #13
def _Run(args, version):
    """Run Vertex AI online prediction."""
    endpoint_ref = args.CONCEPTS.endpoint.Parse()
    args.region = endpoint_ref.AsDict()['locationsId']

    with endpoint_util.AiplatformEndpointOverrides(version,
                                                   region=args.region):
        if args.request.startswith('@'):
            request = console_io.ReadFromFileOrStdin(args.request[1:],
                                                     binary=True)
        else:
            request = args.request.encode('utf-8')

        endpoints_client = client.EndpointsClient(version=version)
        _, response = endpoints_client.RawPredict(endpoint_ref,
                                                  args.http_headers, request)

        # Workaround since gcloud only supports protobufs as JSON objects. Since
        # raw predict can return anything, write raw bytes to stdout.
        if not args.IsSpecified('format'):
            sys.stdout.buffer.write(response)
            return None

        # If user asked for formatting, assume it's a JSON object.
        try:
            return json.loads(response.decode('utf-8'))
        except ValueError:
            raise core_exceptions.Error(
                'No JSON object could be decoded from the '
                'HTTP response body:\n' + six.text_type(response))
Example #14
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages

    # Generate uuid for request.
    instance_id = uuid.uuid4().hex
    regions_ref = dp_util.ParseRegion(dataproc)

    if args.file.startswith('gs://'):
      data = storage_helpers.ReadObject(args.file)
    else:
      data = console_io.ReadFromFileOrStdin(args.file, binary=False)
    template = export_util.Import(
        message_type=msgs.WorkflowTemplate, stream=data)

    # Send instantiate inline request.
    request = \
      msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
          parent=regions_ref.RelativeName(),
          workflowTemplate=template)

    request.requestId = instance_id

    operation = \
      dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
          request)
    if args.async_:
      log.status.Print('Instantiating with operation [{0}].'.format(
          operation.name))
      return
    operation = dp_util.WaitForWorkflowTemplateOperation(dataproc, operation)
    return operation
Example #15
  def _ParseGcsDestinationConfig(self, gcs_destination_config_file):
    """Parses a gcs_destination_config into the GcsDestinationConfig message."""
    data = console_io.ReadFromFileOrStdin(
        gcs_destination_config_file, binary=False)
    try:
      gcs_destination_head_config_data = yaml.load(data)
    except Exception as e:
      raise ds_exceptions.ParseError('Cannot parse YAML:[{0}]'.format(e))

    gcs_destination_config_data_object = gcs_destination_head_config_data.get(
        'gcs_destination_config')
    gcs_destination_config_data = (gcs_destination_config_data_object or
                                   gcs_destination_head_config_data)

    path = gcs_destination_config_data.get('path', '')
    file_rotation_mb = gcs_destination_config_data.get('file_rotation_mb', {})
    file_rotation_interval = gcs_destination_config_data.get(
        'file_rotation_interval', {})
    gcs_dest_config_msg = self._messages.GcsDestinationConfig(
        path=path, fileRotationMb=file_rotation_mb,
        fileRotationInterval=file_rotation_interval)
    if 'avro_file_format' in gcs_destination_config_data:
      gcs_dest_config_msg.avroFileFormat = self._messages.AvroFileFormat()
    elif 'json_file_format' in gcs_destination_config_data:
      json_file_format_data = gcs_destination_config_data.get(
          'json_file_format')
      gcs_dest_config_msg.jsonFileFormat = self._messages.JsonFileFormat(
          compression=json_file_format_data.get('compression'),
          schemaFileFormat=json_file_format_data.get('schema_file_format'))
    else:
      raise ds_exceptions.ParseError(
          'Cannot parse YAML: missing file format.')
    return gcs_dest_config_msg
Example #16
def _Run(args, holder, target_https_proxy_arg, release_track):
    """Issues requests necessary to import target HTTPS proxies."""
    client = holder.client

    target_https_proxy_ref = target_https_proxy_arg.ResolveAsResource(
        args,
        holder.resources,
        default_scope=compute_scope.ScopeEnum.GLOBAL,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
        target_https_proxy = export_util.Import(
            message_type=client.messages.TargetHttpsProxy,
            stream=data,
            schema_path=_GetSchemaPath(release_track))
    except yaml_validator.ValidationError as e:
        raise exceptions.ToolException(str(e))

    # Get existing target HTTPS proxy.
    try:
        target_https_proxies_utils.SendGetRequest(client,
                                                  target_https_proxy_ref)
    except apitools_exceptions.HttpError as error:
        if error.status_code != 404:
            raise error
        # Target HTTPS proxy does not exist, create a new one.
        return _SendInsertRequest(client, target_https_proxy_ref,
                                  target_https_proxy)

    console_message = ('Target HTTPS Proxy [{0}] cannot be updated'.format(
        target_https_proxy_ref.Name()))
    raise NotImplementedError(console_message)
Example #17
def _ParseBodyArgs(args):
    if args.IsSpecified('body_file'):
        body = console_io.ReadFromFileOrStdin(args.body_file, binary=False)
    elif args.IsSpecified('body_content'):
        body = args.body_content
    else:
        return None
    return http_encoding.Encode(body)
Example #18
def _ParsePayloadArgs(args):
  if args.IsSpecified('payload_file'):
    payload = console_io.ReadFromFileOrStdin(args.payload_file, binary=False)
  elif args.IsSpecified('payload_content'):
    payload = args.payload_content
  else:
    return None
  return http_encoding.Encode(payload)
Example #19
 def testFileReadBinary(self):
     filename = self.Touch(
         self.temp_path,
         contents=
         b'\xc3\x9c\xc3\xb1\xc3\xae\xc3\xa7\xc3\xb2\xc3\x90\xc3\xa9\n')
     contents = console_io.ReadFromFileOrStdin(filename, binary=True)
     self.assertEqual(
         contents,
         b'\xc3\x9c\xc3\xb1\xc3\xae\xc3\xa7\xc3\xb2\xc3\x90\xc3\xa9\n')
Example #20
 def __call__(self, parser, namespace, value, option_string=None):
   """Stores the contents of the file and the file name in namespace."""
   try:
     content = console_io.ReadFromFileOrStdin(value, binary=binary)
   except files.Error as e:
     raise ArgumentTypeError(e)
   setattr(namespace, self.dest, content)
   new_dest = '{}_path'.format(self.dest)
   setattr(namespace, new_dest, value)
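In the snippet above, `binary` and `ArgumentTypeError` are not defined locally; in the original module they come from an enclosing factory function's closure and from argparse. A minimal self-contained sketch of that factory pattern, with illustrative names rather than the SDK's actual API:

import argparse


def _FileContentsAction(binary=False):
  """Hypothetical factory: `binary` is captured by the closure, which is
  how an action like the one above can reference it without defining it."""

  class Action(argparse.Action):

    def __call__(self, parser, namespace, value, option_string=None):
      mode = 'rb' if binary else 'r'
      try:
        with open(value, mode) as f:
          content = f.read()
      except OSError as e:
        raise argparse.ArgumentTypeError(str(e))
      # Store both the contents and the originating path, as the snippet does.
      setattr(namespace, self.dest, content)
      setattr(namespace, '{}_path'.format(self.dest), value)

  return Action


parser = argparse.ArgumentParser()
parser.add_argument('--key-file', action=_FileContentsAction(binary=True))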
Example #21
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    forwarding_rule_ref = self.FORWARDING_RULE_ARG.ResolveAsResource(
        args,
        holder.resources,
        scope_lister=compute_flags.GetDefaultScopeLister(holder.client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
      forwarding_rule = export_util.Import(
          message_type=client.messages.ForwardingRule,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ValidationError(str(e))

    # Get existing forwarding rule.
    try:
      forwarding_rule_old = utils.SendGetRequest(client, forwarding_rule_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Forwarding rule does not exist, create a new one.
      return self.SendInsertRequest(client, forwarding_rule_ref,
                                    forwarding_rule)

    # No change, do not send requests to server.
    if forwarding_rule_old == forwarding_rule:
      return

    console_io.PromptContinue(
        message=('Forwarding Rule [{0}] will be overwritten.').format(
            forwarding_rule_ref.Name()),
        cancel_on_no=True)

    # Populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    forwarding_rule.id = forwarding_rule_old.id
    forwarding_rule.fingerprint = forwarding_rule_old.fingerprint

    # Unspecified fields are assumed to be cleared.
    cleared_fields = []
    if not forwarding_rule.networkTier:
      cleared_fields.append('networkTier')
    if not forwarding_rule.allowGlobalAccess:
      cleared_fields.append('allowGlobalAccess')
    if self._support_source_ip_range and not forwarding_rule.sourceIpRanges:
      cleared_fields.append('sourceIpRanges')
    if not forwarding_rule.metadataFilters:
      cleared_fields.append('metadataFilters')

    with client.apitools_client.IncludeFields(cleared_fields):
      return self.SendPatchRequest(client, forwarding_rule_ref, forwarding_rule)
Example #22
def _IsJsonFile(filename):
  """Check and validate if given filename is proper json file."""
  content = console_io.ReadFromFileOrStdin(filename, binary=True)
  try:
    return json.loads(encoding.Decode(content)), True
  except ValueError as e:
    if filename.endswith('.json'):
      raise auth_service_account.BadCredentialFileException(
          'Could not read json file {0}: {1}'.format(filename, e))
  return content, False
Example #23
def ReadSubstitutionRuleFile(file_arg):
    """Reads content of the substitution rule file specified in file_arg."""
    if not file_arg:
        return None
    data = console_io.ReadFromFileOrStdin(file_arg, binary=False)
    messages = api_util.GetMessagesModule()
    temp_restore_config = export_util.Import(
        message_type=messages.RestoreConfig,
        stream=data,
        schema_path=GetSchemaPath())
    return temp_restore_config.substitutionRules
Example #24
    def Run(self, args):
        project_ref = resources.REGISTRY.Parse(
            properties.VALUES.core.project.Get(required=True),
            collection='cloudresourcemanager.projects',
        )
        normalized_artifact_url = binauthz_command_util.NormalizeArtifactUrl(
            args.artifact_url)
        signature = console_io.ReadFromFileOrStdin(args.signature_file,
                                                   binary=True)
        if args.payload_file:
            payload = files.ReadBinaryFileContents(args.payload_file)
        else:
            payload = binauthz_command_util.MakeSignaturePayload(
                normalized_artifact_url)

        attestor_ref = args.CONCEPTS.attestor.Parse()
        api_version = apis.GetApiVersion(self.ReleaseTrack())
        attestor = attestors.Client(api_version).Get(attestor_ref)
        # TODO(b/79709480): Add other types of attestors if/when supported.
        note_ref = resources.REGISTRY.ParseResourceId(
            'containeranalysis.projects.notes',
            attestor.userOwnedDrydockNote.noteReference, {})

        validation_enabled = 'validate' in args and args.validate
        validation_callback = functools.partial(
            validation.validate_attestation,
            attestor_ref=attestor_ref,
            api_version=api_version)

        ca_api_version = ca_apis.GetApiVersion(self.ReleaseTrack())
        # TODO(b/138859339): Remove when remainder of surface migrated to V1 API.
        if ca_api_version == ca_apis.V1:
            return containeranalysis.Client(
                ca_api_version).CreateAttestationOccurrence(
                    project_ref=project_ref,
                    note_ref=note_ref,
                    artifact_url=normalized_artifact_url,
                    public_key_id=args.public_key_id,
                    signature=signature,
                    plaintext=payload,
                    validation_callback=(validation_callback
                                         if validation_enabled else None),
                )
        else:
            return containeranalysis.Client(
                ca_api_version).CreateGenericAttestationOccurrence(
                    project_ref=project_ref,
                    note_ref=note_ref,
                    artifact_url=normalized_artifact_url,
                    public_key_id=args.public_key_id,
                    signature=signature,
                    plaintext=payload,
                )
Example #25
def ReadAutoscalingPolicy(dataproc, policy_id, policy_file_name=None):
    """Returns autoscaling policy read from YAML file.

  Validates it using the schema for the API version corresponding to the
  dataproc instance, and backfills necessary fields.

  Args:
    dataproc: wrapper for dataproc resources, client and messages.
    policy_id: The autoscaling policy id (last piece of the resource name).
    policy_file_name: if set, location of the YAML file to read from. Otherwise,
      reads from stdin.

  Raises:
    argparse.ArgumentError: If duration formats are invalid or out of bounds.
  """
    # Read template from YAML file, validate it using the schema for the
    # API version corresponding to the dataproc instance.
    data = console_io.ReadFromFileOrStdin(policy_file_name or '-',
                                          binary=False)
    schema_path = export_util.GetSchemaPath('dataproc',
                                            dataproc.api_version,
                                            'AutoscalingPolicy',
                                            for_help=False)

    try:
        policy = export_util.Import(
            message_type=dataproc.messages.AutoscalingPolicy,
            stream=data,
            schema_path=schema_path)
    except yaml_validator.ValidationError as e:
        raise exceptions.ValidationError(str(e))

    # Ignore user set id in the file (if any), and overwrite with the policy_ref
    # provided with this command
    policy.id = policy_id

    # Similarly, ignore the set resource name. This field is OUTPUT_ONLY, so we
    # can just clear it.
    policy.name = None

    # Set duration fields to their seconds values
    if policy.basicAlgorithm.cooldownPeriod is not None:
        policy.basicAlgorithm.cooldownPeriod = str(
            arg_parsers.Duration(lower_bound='2m', upper_bound='1d')(
                policy.basicAlgorithm.cooldownPeriod)) + 's'
    if policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout is not None:
        policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout = str(
            arg_parsers.Duration(lower_bound='0s', upper_bound='1d')(
                policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout)
        ) + 's'

    return policy
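Example #25 normalizes duration fields by parsing strings like '2m' into total seconds and re-serializing with an 's' suffix. A minimal sketch of that step, under the assumption that arg_parsers.Duration returns an integral number of seconds and enforces the given bounds (names below are illustrative):

_UNIT_SECONDS = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}


def _NormalizeDuration(value, lower_s=120, upper_s=86400):
  """Parses '<n><unit>' into total seconds, checks bounds, appends 's'."""
  seconds = int(value[:-1]) * _UNIT_SECONDS[value[-1]]
  if not lower_s <= seconds <= upper_s:
    raise ValueError('duration out of bounds: {0}'.format(value))
  return '{0}s'.format(seconds)


assert _NormalizeDuration('2m') == '120s'  # e.g. a two-minute cooldownPeriod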
Example #26
    def testExportToFile(self):
        ssl_policy_ref = self.GetSslPolicyRef(self._resource_name)
        self.ExpectGetRequest(ssl_policy_ref, self._existing_ssl_policy)

        file_name = os.path.join(self.temp_path, 'export.yaml')

        self.RunExport('{0} --destination {1}'.format(self._resource_name,
                                                      file_name))

        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_ssl_policy = export_util.Import(
            message_type=self.messages.SslPolicy, stream=data)
        self.assertEqual(self._existing_ssl_policy, exported_ssl_policy)
Example #27
  def testExportToFile(self):
    self.make_requests.side_effect = iter([
        [test_resources.TARGET_GRPC_PROXIES_ALPHA[0]],
    ])

    file_name = os.path.join(self.temp_path, 'export.yaml')
    self.RunExport('{0} --destination {1}'.format(self._resource_name,
                                                  file_name))

    data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
    exported_target_grpc_proxy = export_util.Import(
        message_type=self.messages.TargetGrpcProxy, stream=data)
    self.assertEqual(self._existing_target_grpc_proxy,
                     exported_target_grpc_proxy)
Example #28
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
        cluster = export_util.Import(message_type=msgs.Cluster, stream=data)

        cluster_ref = args.CONCEPTS.cluster.Parse()
        cluster.clusterName = cluster_ref.clusterName
        cluster.projectId = cluster_ref.projectId

        # Import only supports create, not update (for now).
        return clusters.CreateCluster(dataproc, cluster_ref, cluster,
                                      args.async_, args.timeout)
Example #29
def ParseMysqlRdbmsFile(messages,
                        mysql_rdbms_file,
                        release_track=base.ReleaseTrack.BETA):
    """Parses a mysql_rdbms_file into the MysqlRdbms message."""
    data = console_io.ReadFromFileOrStdin(mysql_rdbms_file, binary=False)
    try:
        mysql_rdbms_head_data = yaml.load(data)
    except Exception as e:
        raise ds_exceptions.ParseError('Cannot parse YAML:[{0}]'.format(e))

    mysql_rdbms_data = mysql_rdbms_head_data.get('mysql_rdbms',
                                                 mysql_rdbms_head_data)
    return ParseMysqlSchemasListToMysqlRdbmsMessage(messages, mysql_rdbms_data,
                                                    release_track)
Example #30
    def Run(self, args):
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        helper = ssl_policies_utils.SslPolicyHelper(holder)
        client = holder.client

        ssl_policy_ref = self.SSL_POLICY_ARG.ResolveAsResource(
            args,
            holder.resources,
            scope_lister=compute_flags.GetDefaultScopeLister(holder.client),
            default_scope=compute_scope.ScopeEnum.GLOBAL)

        data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

        try:
            ssl_policy = export_util.Import(
                message_type=client.messages.SslPolicy,
                stream=data,
                schema_path=self.GetSchemaPath())
        except yaml_validator.ValidationError as e:
            raise compute_exceptions.ValidationError(str(e))

        # Get existing SSL policy.
        try:
            ssl_policy_old = helper.Describe(ssl_policy_ref)
        except apitools_exceptions.HttpError as error:
            if error.status_code != 404:
                raise error
            # SSL policy does not exist, create a new one.
            operation_ref = helper.Create(ssl_policy_ref, ssl_policy)
            return helper.WaitForOperation(ssl_policy_ref, operation_ref,
                                           'Creating SSL policy')

        # No change, do not send requests to server.
        if ssl_policy_old == ssl_policy:
            return

        console_io.PromptContinue(
            message=('SSL Policy [{0}] will be overwritten.').format(
                ssl_policy_ref.Name()),
            cancel_on_no=True)

        # Populate id and fingerprint fields. These two fields are manually
        # removed from the schema files.
        ssl_policy.id = ssl_policy_old.id
        ssl_policy.fingerprint = ssl_policy_old.fingerprint

        operation_ref = helper.Patch(ssl_policy_ref, ssl_policy, False)
        return helper.WaitForOperation(ssl_policy_ref, operation_ref,
                                       'Updating SSL policy')