Example #1
def ReadAutoscalingPolicy(dataproc, policy_id, policy_file_name=None):
    """Returns autoscaling policy read from YAML file.

  Validates it using the schema for the API version corresponding to the
  dataproc instance, and backfills necessary fields.

  Args:
    dataproc: wrapper for dataproc resources, client and messages.
    policy_id: The autoscaling policy id (last piece of the resource name).
    policy_file_name: if set, location of the YAML file to read from. Otherwise,
      reads from stdin.

  Raises:
    argparse.ArgumentError if duration formats are invalid or out of bounds.
  """
    # Read template from YAML file, validate it using the schema for the
    # API version corresponding to the dataproc instance.
    data = console_io.ReadFromFileOrStdin(policy_file_name or '-',
                                          binary=False)
    schema_path = export_util.GetSchemaPath('dataproc',
                                            dataproc.api_version,
                                            'AutoscalingPolicy',
                                            for_help=False)

    try:
        policy = export_util.Import(
            message_type=dataproc.messages.AutoscalingPolicy,
            stream=data,
            schema_path=schema_path)
    except yaml_validator.ValidationError as e:
        raise exceptions.ValidationError(e.message)

    # Ignore any user-set id in the file, and overwrite it with the id from the
    # policy reference provided to this command.
    policy.id = policy_id

    # Similarly, ignore the set resource name. This field is OUTPUT_ONLY, so we
    # can just clear it.
    policy.name = None

    # Set duration fields to their seconds values
    if policy.basicAlgorithm.cooldownPeriod is not None:
        policy.basicAlgorithm.cooldownPeriod = str(
            arg_parsers.Duration(lower_bound='2m', upper_bound='1d')(
                policy.basicAlgorithm.cooldownPeriod)) + 's'
    if policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout is not None:
        policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout = str(
            arg_parsers.Duration(lower_bound='0s', upper_bound='1d')(
                policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout)
        ) + 's'

    return policy
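
The last step above rewrites human-friendly duration strings (e.g. '10m') into the seconds-based 'Ns' form the Dataproc API stores, while enforcing per-field bounds. Below is a minimal standalone sketch of that normalization, hand-rolling the parsing instead of using googlecloudsdk's arg_parsers.Duration; the unit table and helper name are illustrative assumptions, not SDK code.

import re

_UNIT_SECONDS = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}


def NormalizeDuration(value, lower_bound, upper_bound):
  """Converts a duration like '10m' to the API form '600s', enforcing bounds."""

  def ToSeconds(text):
    match = re.fullmatch(r'(\d+)([smhd])', text)
    if not match:
      raise ValueError('invalid duration: {0!r}'.format(text))
    return int(match.group(1)) * _UNIT_SECONDS[match.group(2)]

  seconds = ToSeconds(value)
  if not ToSeconds(lower_bound) <= seconds <= ToSeconds(upper_bound):
    raise ValueError('duration {0!r} out of bounds [{1}, {2}]'.format(
        value, lower_bound, upper_bound))
  return str(seconds) + 's'


# NormalizeDuration('10m', lower_bound='2m', upper_bound='1d') == '600s'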
Example #2
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
    try:
      cluster = export_util.Import(
          message_type=msgs.Cluster,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ValidationError(e.message)

    cluster_ref = dp_util.ParseCluster(args.name, dataproc)
    cluster.clusterName = cluster_ref.clusterName
    cluster.projectId = cluster_ref.projectId

    # Import only supports create, not update (for now). Note: `async` became a
    # reserved word in Python 3, so the flag's argparse dest is `async_`.
    return clusters.CreateCluster(dataproc, cluster, args.async_, args.timeout)
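
Both examples rely on console_io.ReadFromFileOrStdin, where '-' (the default when --source is omitted) means "read stdin". A minimal sketch of that behavior using only the standard library, plus PyYAML standing in for the parse step that export_util.Import performs before schema validation; the file name is hypothetical.

import sys

import yaml  # PyYAML; stands in for the SDK's YAML handling here.


def ReadFromFileOrStdin(path):
  """Returns the file's text, treating '-' as stdin."""
  if path == '-':
    return sys.stdin.read()
  with open(path, 'r') as f:
    return f.read()


# e.g. turn a cluster spec into a plain dict before message conversion:
cluster_dict = yaml.safe_load(ReadFromFileOrStdin('cluster.yaml'))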
Example #3
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages

    template_ref = args.CONCEPTS.template.Parse()
    # TODO(b/109837200) make the dataproc discovery doc parameters consistent
    # Parent() fails for the collection because of projectId/projectsId and
    # regionId/regionsId inconsistencies.
    # parent = template_ref.Parent().RelativePath()
    parent = '/'.join(template_ref.RelativeName().split('/')[0:4])

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
    try:
      template = export_util.Import(
          message_type=msgs.WorkflowTemplate,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ValidationError(e.message)

    # Populate id field.
    template.id = template_ref.Name()

    try:
      old_template = dataproc.GetRegionsWorkflowTemplate(template_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Template does not exist. Create a new one.
      request = msgs.DataprocProjectsRegionsWorkflowTemplatesCreateRequest(
          parent=parent, workflowTemplate=template)
      return dataproc.client.projects_regions_workflowTemplates.Create(request)
    # Update the existing template.
    console_io.PromptContinue(
        message='Workflow template [{0}] will be overwritten.'.format(
            template.id),
        cancel_on_no=True)
    # Populate version field and name field.
    template.version = old_template.version
    template.name = template_ref.RelativeName()
    return dataproc.client.projects_regions_workflowTemplates.Update(template)
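
Example #3 is a create-or-update (upsert) flow: GET the template, create it on a 404, otherwise prompt and update, carrying the stored version forward so a concurrent edit fails the update rather than being silently clobbered. The skeleton of that pattern against an in-memory stand-in service follows; everything here is illustrative, not the Dataproc client API.

class NotFoundError(Exception):
  """Stand-in for an HTTP 404 from the API."""


class FakeTemplateService(object):
  """In-memory stand-in for the workflowTemplates service (illustration only)."""

  def __init__(self):
    self._store = {}

  def Get(self, name):
    if name not in self._store:
      raise NotFoundError(name)
    return dict(self._store[name])

  def Create(self, name, template):
    template['version'] = 1
    self._store[name] = template
    return template

  def Update(self, name, template):
    # A real API would reject a stale version; here we just bump it.
    template['version'] = self._store[name]['version'] + 1
    self._store[name] = template
    return template


def ImportTemplate(service, name, template):
  """Create-or-update: create on 'not found', otherwise update in place."""
  try:
    old = service.Get(name)
  except NotFoundError:
    return service.Create(name, template)
  template['version'] = old['version']  # carry the stored version forward
  return service.Update(name, template)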