Example #1
0
    def Run(self, args):
        """Creates a Dataproc cluster from the parsed command-line args.

        Args:
          args: argparse.Namespace, the parsed arguments for this command.

        Returns:
          The result of clusters.CreateCluster for the requested cluster.
        """
        self.ValidateArgs(args)

        client = dp.Dataproc(self.ReleaseTrack())
        ref = args.CONCEPTS.cluster.Parse()

        # Resolve the compute resources the cluster config will reference.
        resources = compute_helpers.GetComputeResources(
            self.ReleaseTrack(), ref.clusterName, ref.region)

        config = clusters.GetClusterConfig(
            args,
            client,
            ref.projectId,
            resources,
            self.BETA,
            include_ttl_config=True)

        cluster_msg = client.messages.Cluster(
            clusterName=ref.clusterName,
            config=config,
            projectId=ref.projectId)

        # Let the command apply any additional message-level settings before
        # the create request is issued.
        self.ConfigureCluster(client.messages, args, cluster_msg)

        return clusters.CreateCluster(
            client, ref, cluster_msg, args.async_, args.timeout)
Example #2
0
 def Run(self, args):
     """Imports a cluster message from a file (or stdin) and creates it."""
     dataproc = dp.Dataproc(self.ReleaseTrack())
     # '-' falls back to reading the cluster definition from stdin.
     data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
     cluster = export_util.Import(message_type=dataproc.messages.Cluster,
                                  stream=data)
     # NOTE(review): unlike the other Run variants in this file, no
     # cluster_ref is passed here -- presumably an older CreateCluster
     # signature; confirm against the clusters helper this version targets.
     return clusters.CreateCluster(dataproc, cluster, args.async_,
                                   args.timeout)
    def Run(self, args):
        """Creates a Dataproc cluster that runs on GKE (virtual cluster).

        Args:
          args: argparse.Namespace, the parsed arguments for this command.

        Returns:
          The result of clusters.CreateCluster for the virtual cluster.
        """
        client = dp.Dataproc(self.ReleaseTrack())

        cluster_ref = args.CONCEPTS.cluster.Parse()
        gke_ref = args.CONCEPTS.gke_cluster.Parse()
        metastore_ref = args.CONCEPTS.metastore_service.Parse()
        history_ref = args.CONCEPTS.history_server_cluster.Parse()

        virtual_config = Create._GetVirtualClusterConfig(
            client, gke_ref, args, metastore_ref, history_ref)

        Create._VerifyGkeClusterIsWorkloadIdentityEnabled(gke_ref)

        if args.setup_workload_identity:
            Create._SetupWorkloadIdentity(args, cluster_ref, gke_ref)

        cluster = client.messages.Cluster(
            clusterName=cluster_ref.clusterName,
            projectId=cluster_ref.projectId,
            virtualClusterConfig=virtual_config)

        return clusters.CreateCluster(
            client,
            cluster_ref,
            cluster,
            args.async_,
            args.timeout,
            # This refers to the old GKE beta.
            enable_create_on_gke=False,
            action_on_failed_primary_workers=None)
Example #4
0
    def Run(self, args):
        """Imports a cluster definition from a file and creates it.

        Args:
          args: argparse.Namespace, the parsed arguments for this command.

        Returns:
          The result of clusters.CreateCluster for the imported cluster.
        """
        client = dp.Dataproc(self.ReleaseTrack())

        # '-' falls back to reading the cluster definition from stdin.
        raw = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
        cluster = export_util.Import(
            message_type=client.messages.Cluster, stream=raw)

        ref = args.CONCEPTS.cluster.Parse()
        # The identity from the command line overrides whatever the imported
        # file contained.
        cluster.clusterName = ref.clusterName
        cluster.projectId = ref.projectId

        # Import only supports create, not update (for now).
        return clusters.CreateCluster(
            client, ref, cluster, args.async_, args.timeout)
    def Run(self, args):
        """Imports a cluster from a file, validating it against the schema.

        Args:
          args: argparse.Namespace, the parsed arguments for this command.

        Raises:
          exceptions.ValidationError: if the file fails schema validation.

        Returns:
          The result of clusters.CreateCluster for the imported cluster.
        """
        client = dp.Dataproc(self.ReleaseTrack())

        # '-' falls back to reading the cluster definition from stdin.
        raw = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
        try:
            cluster = export_util.Import(
                message_type=client.messages.Cluster,
                stream=raw,
                schema_path=self.GetSchemaPath())
        except yaml_validator.ValidationError as e:
            # Surface schema problems as a user-facing validation error.
            raise exceptions.ValidationError(e.message)

        ref = args.CONCEPTS.cluster.Parse()
        # The identity from the command line overrides whatever the imported
        # file contained.
        cluster.clusterName = ref.clusterName
        cluster.projectId = ref.projectId

        # Import only supports create, not update (for now).
        return clusters.CreateCluster(
            client, ref, cluster, args.async_, args.timeout)
  def Run(self, args):
    """Creates a Dataproc cluster from the parsed command-line args.

    Args:
      args: argparse.Namespace, the parsed arguments for this command.

    Returns:
      The result of clusters.CreateCluster for the requested cluster.
    """
    self.ValidateArgs(args)

    client = dp.Dataproc(self.ReleaseTrack())
    ref = args.CONCEPTS.cluster.Parse()

    # Resolve the compute resources the cluster config will reference.
    resources = compute_helpers.GetComputeResources(
        self.ReleaseTrack(), ref.clusterName, ref.region)

    config = clusters.GetClusterConfig(
        args,
        client,
        ref.projectId,
        resources,
        self.BETA,
        include_ttl_config=True,
        include_gke_platform_args=self.BETA)

    # The failed-primary-workers flag is only honored outside the beta track.
    if self.BETA:
      action_on_failed_primary_workers = None
    else:
      action_on_failed_primary_workers = arg_utils.ChoiceToEnum(
          args.action_on_failed_primary_workers,
          client.messages.DataprocProjectsRegionsClustersCreateRequest
          .ActionOnFailedPrimaryWorkersValueValuesEnum)

    cluster = client.messages.Cluster(
        clusterName=ref.clusterName,
        config=config,
        projectId=ref.projectId)

    # Let the command apply any additional message-level settings before the
    # create request is issued.
    self.ConfigureCluster(client.messages, args, cluster)

    return clusters.CreateCluster(
        client,
        ref,
        cluster,
        args.async_,
        args.timeout,
        enable_create_on_gke=self.BETA,
        action_on_failed_primary_workers=action_on_failed_primary_workers)