Example 1
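Imports a Dataproc workflow template: the template is read from YAML (args.source if given, otherwise stdin), validated against the schema for the current release track, and then either created or, after a confirmation prompt, used to overwrite the existing template.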
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        template_ref = args.CONCEPTS.template.Parse()
        # TODO(b/109837200) make the dataproc discovery doc parameters consistent
        # Parent() fails for the collection because of projectId/projectsId and
        # regionId/regionsId inconsistencies.
        # parent = template_ref.Parent().RelativePath()
        parent = '/'.join(template_ref.RelativeName().split('/')[0:4])

        if self.ReleaseTrack() == base.ReleaseTrack.GA:
            schema_path = V1_SCHEMA_PATH
        else:
            schema_path = V1_BETA2_SCHEMA_PATH

        # Read the template from args.source, or from stdin if no source was
        # given.
        if args.source:
            with files.FileReader(args.source) as stream:
                template = util.ReadYaml(message_type=msgs.WorkflowTemplate,
                                         stream=stream,
                                         schema_path=schema_path)
        else:
            template = util.ReadYaml(message_type=msgs.WorkflowTemplate,
                                     stream=sys.stdin,
                                     schema_path=schema_path)

        # Populate id field.
        template.id = template_ref.Name()

        try:
            old_template = dataproc.GetRegionsWorkflowTemplate(template_ref)
        except apitools_exceptions.HttpError as error:
            if error.status_code != 404:
                raise error
            # Template does not exist. Create a new one.
            request = msgs.DataprocProjectsRegionsWorkflowTemplatesCreateRequest(
                parent=parent, workflowTemplate=template)
            return dataproc.client.projects_regions_workflowTemplates.Create(
                request)
        # Update the existing template.
        console_io.PromptContinue(
            message=('Workflow template [{0}] will be overwritten.').format(
                template.id),
            cancel_on_no=True)
        # Populate version field and name field.
        template.version = old_template.version
        template.name = template_ref.RelativeName()
        return dataproc.client.projects_regions_workflowTemplates.Update(
            template)
Example 2
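Instantiates a workflow template inline: the template is read and schema-validated from a YAML file, an instantiate-inline request is sent, and the command either returns immediately in async mode or waits for the resulting operation to finish.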
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Generate uuid for request.
        instance_id = uuid.uuid4().hex
        regions_ref = util.ParseRegion(dataproc)
        # Read template from YAML file and validate it using a schema.
        with files.FileReader(args.file) as stream:
            template = util.ReadYaml(message_type=msgs.WorkflowTemplate,
                                     stream=stream,
                                     schema_path=V1_BETA2_SCHEMA_PATH)

        # Send instantiate inline request.
        request = \
          msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
              instanceId=instance_id,
              parent=regions_ref.RelativeName(),
              workflowTemplate=template)
        operation = \
          dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
              request)
        if args.async:
            log.status.Print('Instantiating with operation [{0}].'.format(
                operation.name))
            return
        operation = util.WaitForWorkflowTemplateOperation(dataproc, operation)
        return operation
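Example 3
An earlier variant of the inline instantiation in Example 2: the YAML file is read with an older ReadYaml signature (no stream, no schema validation), and the async log message also includes the template id.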
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Generate uuid for request.
        instance_id = uuid.uuid4().hex
        regions_ref = util.ParseRegion(dataproc)
        # Read template from YAML file.
        template = util.ReadYaml(args.file, msgs.WorkflowTemplate)

        # Send instantiate inline request.
        request = \
          msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
              instanceId=instance_id,
              parent=regions_ref.RelativeName(),
              workflowTemplate=template)
        operation = \
          dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
              request)
        if args.async:
            log.status.Print(
                'Instantiating [{0}] with operation [{1}].'.format(
                    template.id, operation.name))
            return
        operation = util.WaitForWorkflowTemplateOperation(dataproc, operation)
        return operation
Example 4
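Creates a cluster from a cluster definition read from a YAML file; no schema validation is applied.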
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())

    # Read cluster from YAML file.
    with files.FileReader(args.file) as stream:
      cluster = util.ReadYaml(
          message_type=dataproc.messages.Cluster, stream=stream)
    return clusters.CreateCluster(dataproc, cluster, args.async, args.timeout)
Example 5
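Imports a cluster: the cluster definition is read and schema-validated from YAML (args.source if given, otherwise stdin), the cluster name and project are filled in from the parsed cluster reference, and the cluster is created. Import does not support updating an existing cluster.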
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Read the cluster definition from args.source, or from stdin if no
        # source was given.
        if args.source:
            with files.FileReader(args.source) as stream:
                cluster = util.ReadYaml(message_type=msgs.Cluster,
                                        stream=stream,
                                        schema_path=SCHEMA_PATH)
        else:
            cluster = util.ReadYaml(message_type=msgs.Cluster,
                                    stream=sys.stdin,
                                    schema_path=SCHEMA_PATH)

        cluster_ref = util.ParseCluster(args.name, dataproc)
        cluster.clusterName = cluster_ref.clusterName
        cluster.projectId = cluster_ref.projectId

        # Import only supports create, not update (for now).
        return clusters.CreateCluster(dataproc, cluster, args.async,
                                      args.timeout)
Example 6
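An earlier variant of Example 4, reading the cluster definition with the older ReadYaml signature that takes a file path and no schema.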
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())

    # Read cluster from YAML file.
    cluster = util.ReadYaml(args.file, dataproc.messages.Cluster)
    return clusters.CreateCluster(dataproc, cluster, args.async, args.timeout)
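All of these snippets feed their YAML input through the same ReadYaml-style helper before passing the resulting message to the Dataproc client. As a rough illustration only, here is a minimal sketch of what such a helper could look like, assuming PyYAML and the apitools encoding module; the name read_yaml and the fact that schema_path is only noted in a comment are assumptions for illustration, not the SDK's actual implementation (which also validates against the schema).

    import yaml
    from apitools.base.py import encoding


    def read_yaml(message_type, stream, schema_path=None):
        """Hypothetical sketch: parse YAML from a stream into a proto message."""
        data = yaml.safe_load(stream)
        # A real helper would validate `data` against schema_path here.
        return encoding.PyValueToMessage(message_type, data)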