def _Run(args, enable_labels=False, legacy_output=False):
  """Creates one or more topics."""
  client = topics.TopicsClient()

  labels = None
  if enable_labels:
    labels = labels_util.ParseCreateArgs(args,
                                         client.messages.Topic.LabelsValue)

  failed = []
  for topic_ref in args.CONCEPTS.topic.Parse():

    try:
      result = client.Create(topic_ref, labels=labels)
    except api_ex.HttpError as error:
      exc = exceptions.HttpException(error)
      log.CreatedResource(topic_ref.RelativeName(), kind='topic',
                          failed=exc.payload.status_message)
      failed.append(topic_ref.topicsId)
      continue

    if legacy_output:
      result = util.TopicDisplayDict(result)
    log.CreatedResource(topic_ref.RelativeName(), kind='topic')
    yield result

  if failed:
    raise util.RequestsFailedError(failed, 'create')
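
All of these examples follow the same two-step pattern: the command's Args() hook registers a --labels flag and Run() converts the parsed value into the request message's LabelsValue map via ParseCreateArgs. A minimal sketch of that pattern, assuming the standard Cloud SDK helpers (labels_util.AddCreateLabelsFlags and labels_util.ParseCreateArgs) and reusing the Topic message from the example above for illustration:

from googlecloudsdk.command_lib.util.args import labels_util

def Args(parser):
  # Registers --labels=KEY=VALUE[,KEY=VALUE,...] on the command's parser.
  labels_util.AddCreateLabelsFlags(parser)

def BuildLabels(args, messages):
  # Builds messages.Topic.LabelsValue from the parsed --labels flag, or
  # returns None when the flag was not supplied.
  return labels_util.ParseCreateArgs(args, messages.Topic.LabelsValue)
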
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())

        template = util.ParseWorkflowTemplates(args.template, dataproc)

        workflow_template = dataproc.GetRegionsWorkflowTemplate(
            template, args.version)

        cluster_name = template.workflowTemplatesId

        compute_resources = compute_helpers.GetComputeResources(
            self.ReleaseTrack(), cluster_name)
        use_accelerators = self.ReleaseTrack() == base.ReleaseTrack.BETA
        use_auto_delete_ttl = self.ReleaseTrack() == base.ReleaseTrack.BETA
        use_min_cpu_platform = self.ReleaseTrack() == base.ReleaseTrack.BETA

        cluster_config = clusters.GetClusterConfig(
            args, dataproc, template.projectsId, compute_resources,
            use_accelerators, use_auto_delete_ttl, use_min_cpu_platform)

        labels = labels_util.ParseCreateArgs(
            args, dataproc.messages.ManagedCluster.LabelsValue)

        managed_cluster = dataproc.messages.ManagedCluster(
            clusterName=cluster_name, config=cluster_config, labels=labels)

        workflow_template.placement = dataproc.messages.WorkflowTemplatePlacement(
            managedCluster=managed_cluster)

        response = dataproc.client.projects_regions_workflowTemplates.Update(
            workflow_template)
        return response

  def Run(self, args):
    """Default Run method implementation."""

    flags.CheckParentFlags(args, parent_required=False)
    project_id = args.id
    if not project_id and args.name:
      candidate = command_lib_util.IdFromName(args.name)
      if candidate and console_io.PromptContinue(
          'No project id provided.',
          'Use [{}] as project id'.format(candidate),
          throw_if_unattended=True):
        project_id = candidate
    if not project_id:
      raise exceptions.RequiredArgumentException(
          'PROJECT_ID', 'an id must be provided for the new project')
    project_ref = command_lib_util.ParseProject(project_id)
    labels = labels_util.ParseCreateArgs(
        args, projects_util.GetMessages().Project.LabelsValue)
    try:
      create_op = projects_api.Create(
          project_ref,
          display_name=args.name,
          parent=projects_api.ParentNameToResourceId(
              flags.GetParentFromFlags(args)),
          labels=labels)
    except apitools_exceptions.HttpConflictError:
      msg = ('Project creation failed. The project ID you specified is '
             'already in use by another project. Please try an alternative '
             'ID.')
      unused_type, unused_value, traceback = sys.exc_info()
      # Re-raise as an HttpException while preserving the original traceback.
      raise exceptions.HttpException(msg).with_traceback(traceback)
    log.CreatedResource(project_ref, is_async=True)
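    # Block until the create operation finishes before enabling services or
    # setting the core/project property below.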
    create_op = operations.WaitForOperation(create_op)

    # Enable cloudapis.googleapis.com
    if args.enable_cloud_apis:
      log.debug('Enabling cloudapis.googleapis.com')
      services_client = apis.GetClientInstance('servicemanagement', 'v1')
      enable_operation = services_enable_api.EnableServiceApiCall(
          project_ref.Name(), 'cloudapis.googleapis.com')
      enable_operation_ref = resources.REGISTRY.Parse(
          enable_operation.name, collection='servicemanagement.operations')
      services_util.WaitForOperation(enable_operation_ref, services_client)

    if args.set_as_default:
      project_property = properties.FromString('core/project')
      properties.PersistProperty(project_property, project_id)
      log.status.Print('Updated property [core/project] to [{0}].'
                       .format(project_id))

    return operations.ExtractOperationResponse(create_op,
                                               apis.GetMessagesModule(
                                                   'cloudresourcemanager',
                                                   'v1').Project)

  def _ConstructCreateSettingsFromArgs(cls,
                                       sql_messages,
                                       args,
                                       instance=None,
                                       release_track=DEFAULT_RELEASE_TRACK):
    """Constructs create settings object from base settings and args."""
    original_settings = instance.settings if instance else None
    settings = cls._ConstructBaseSettingsFromArgs(sql_messages, args, instance,
                                                  release_track)

    if args.on_premises_host_port:
      if args.require_ssl:
        raise exceptions.ToolException('Argument --on-premises-host-port not '
                                       'allowed with --require-ssl')
      settings.onPremisesConfiguration = sql_messages.OnPremisesConfiguration(
          hostPort=args.on_premises_host_port)

    backup_configuration = (reducers.BackupConfiguration(
        sql_messages,
        instance,
        backup=args.backup,
        backup_start_time=args.backup_start_time,
        enable_bin_log=args.enable_bin_log))
    if backup_configuration:
      cls.AddBackupConfigToSettings(settings, backup_configuration)

    settings.databaseFlags = (reducers.DatabaseFlags(
        sql_messages, original_settings, database_flags=args.database_flags))

    settings.maintenanceWindow = (reducers.MaintenanceWindow(
        sql_messages,
        instance,
        maintenance_release_channel=args.maintenance_release_channel,
        maintenance_window_day=args.maintenance_window_day,
        maintenance_window_hour=args.maintenance_window_hour))

    if args.storage_type:
      settings.dataDiskType = STORAGE_TYPE_PREFIX + args.storage_type

    # BETA args.
    if release_track == base.ReleaseTrack.BETA:
      settings.userLabels = labels_util.ParseCreateArgs(
          args, sql_messages.Settings.UserLabelsValue)

      # Check that availability type is only specified if this is Postgres.
      if (args.IsSpecified('availability_type') and
          not api_util.InstancesV1Beta4.IsPostgresDatabaseVersion(
              args.database_version)):
        raise exceptions.InvalidArgumentException(
            '--availability-type', 'Cannot set [--availability-type] on a '
            'non-Postgres instance.')

    return settings

  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Yields:
      A serialized object (dict) describing the results of the operation.
      This description fits the Resource described in the ResourceRegistry under
      'pubsub.projects.snapshots'.

    Raises:
      util.RequestsFailedError: if any of the requests to the API failed.
    """
    client = snapshots.SnapshotsClient()

    subscription_ref = util.ParseSubscription(
        args.subscription, args.subscription_project)

    labels = labels_util.ParseCreateArgs(
        args, client.messages.CreateSnapshotRequest.LabelsValue)

    failed = []
    for snapshot_name in args.snapshot:
      snapshot_ref = util.ParseSnapshot(snapshot_name)

      try:
        result = client.Create(snapshot_ref, subscription_ref, labels=labels)
      except api_ex.HttpError as error:
        exc = exceptions.HttpException(error)
        log.CreatedResource(snapshot_ref.RelativeName(), kind='snapshot',
                            failed=exc.payload.status_message)
        failed.append(snapshot_name)
        continue

      result = util.SnapshotDisplayDict(result)
      log.CreatedResource(snapshot_ref.RelativeName(), kind='snapshot')
      yield result

    if failed:
      raise util.RequestsFailedError(failed, 'create')

    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        messages = dataproc.messages

        template_ref = util.ParseWorkflowTemplates(args.template, dataproc)
        regions_ref = util.ParseRegion(dataproc)

        workflow_template = messages.WorkflowTemplate(
            id=args.template,
            name=template_ref.RelativeName(),
            labels=labels_util.ParseCreateArgs(
                args, messages.WorkflowTemplate.LabelsValue))

        request = messages.DataprocProjectsRegionsWorkflowTemplatesCreateRequest(
            parent=regions_ref.RelativeName(),
            workflowTemplate=workflow_template)

        template = dataproc.client.projects_regions_workflowTemplates.Create(
            request)
        return template

    def Run(self, args):
        client = cloudkms_base.GetClientInstance()
        messages = cloudkms_base.GetMessagesModule()

        crypto_key_ref = flags.ParseCryptoKeyName(args)
        parent_ref = flags.ParseParentFromResource(crypto_key_ref)

        req = messages.CloudkmsProjectsLocationsKeyRingsCryptoKeysCreateRequest(
            parent=parent_ref.RelativeName(),
            cryptoKeyId=crypto_key_ref.Name(),
            cryptoKey=messages.CryptoKey(
                # TODO(b/35914817): Find a better way to get the enum value by name.
                purpose=getattr(messages.CryptoKey.PurposeValueValuesEnum,
                                PURPOSE_MAP[args.purpose]),
                labels=labels_util.ParseCreateArgs(
                    args, messages.CryptoKey.LabelsValue)))

        flags.SetNextRotationTime(args, req.cryptoKey)
        flags.SetRotationPeriod(args, req.cryptoKey)

        return client.projects_locations_keyRings_cryptoKeys.Create(req)
Example #8
def ParseCreateLabels(jobs_client, args):
  return labels_util.ParseCreateArgs(args, jobs_client.job_class.LabelsValue)
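
For context, ParseCreateArgs reads the dict produced by the --labels flag and packs it into the given LabelsValue map message. A hedged approximation of what it does (not the SDK source; the real helper also supports a custom flag dest and extra validation):

def _ParseCreateArgsSketch(args, labels_cls):
  # Turn the parsed --labels dict into the proto map message, or return None
  # when the flag was not supplied.
  labels = getattr(args, 'labels', None)
  if labels is None:
    return None
  return labels_cls(additionalProperties=[
      labels_cls.AdditionalProperty(key=key, value=value)
      for key, value in sorted(labels.items())
  ])
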
Example #9
def ConfigureCluster(messages, args, cluster):
    """Performs any additional configuration of the cluster."""
    cluster.labels = labels_util.ParseCreateArgs(
        args, messages.Cluster.LabelsValue)

def ConfigureJob(messages, job, args):
    """Add type-specific job configuration to job message."""
    # Parse labels (if present)
    job.labels = labels_util.ParseCreateArgs(
        args, messages.Job.LabelsValue)
Example #11
def ParseCreateLabels(client, args):
    return labels_util.ParseCreateArgs(args, client.version_class.LabelsValue)

    def Run(self, args):
        """This is what gets called when the user runs this command.

        Args:
          args: argparse.Namespace, All the arguments that were provided to
            this command invocation.

        Raises:
          files.Error: A file argument could not be read.
          GenomicsError: User input was invalid.
          HttpException: An http error response was received while executing
            api request.

        Returns:
          Operation representing the running pipeline.
        """
        apitools_client = genomics_util.GetGenomicsClient('v1alpha2')
        genomics_messages = genomics_util.GetGenomicsMessages('v1alpha2')

        pipeline = genomics_util.GetFileAsMessage(
            args.pipeline_file, genomics_messages.Pipeline,
            self.context[lib.STORAGE_V1_CLIENT_KEY])
        pipeline.projectId = genomics_util.GetProjectId()

        arg_inputs = _ValidateAndMergeArgInputs(args)

        inputs = genomics_util.ArgDictToAdditionalPropertiesList(
            arg_inputs,
            genomics_messages.RunPipelineArgs.InputsValue.AdditionalProperty)
        outputs = genomics_util.ArgDictToAdditionalPropertiesList(
            args.outputs,
            genomics_messages.RunPipelineArgs.OutputsValue.AdditionalProperty)

        # Set "overrides" on the resources. If the user did not pass anything on
        # the command line, do not set anything in the resource: preserve the
        # user-intent "did not set" vs. "set an empty value/list"

        resources = genomics_messages.PipelineResources(
            preemptible=args.preemptible)
        if args.memory:
            resources.minimumRamGb = args.memory
        if args.disk_size:
            resources.disks = []
            for disk_encoding in args.disk_size.split(','):
                disk_args = disk_encoding.split(':', 1)
                resources.disks.append(
                    genomics_messages.Disk(name=disk_args[0],
                                           sizeGb=int(disk_args[1])))

        # Progression for picking the right zones...
        #   If specified on the command line, use them.
        #   If specified in the Pipeline definition, use them.
        #   If there is a GCE default zone in the local configuration, use it.
        #   Else let the API select a zone
        if args.zones:
            resources.zones = args.zones
        elif pipeline.resources and pipeline.resources.zones:
            pass
        elif properties.VALUES.compute.zone.Get():
            resources.zones = [properties.VALUES.compute.zone.Get()]

        request = genomics_messages.RunPipelineRequest(
            ephemeralPipeline=pipeline,
            pipelineArgs=genomics_messages.RunPipelineArgs(
                inputs=genomics_messages.RunPipelineArgs.InputsValue(
                    additionalProperties=inputs),
                outputs=genomics_messages.RunPipelineArgs.OutputsValue(
                    additionalProperties=outputs),
                clientId=args.run_id,
                logging=genomics_messages.LoggingOptions(gcsPath=args.logging),
                labels=labels_util.ParseCreateArgs(
                    args, genomics_messages.RunPipelineArgs.LabelsValue),
                projectId=genomics_util.GetProjectId(),
                serviceAccount=genomics_messages.ServiceAccount(
                    email=args.service_account_email,
                    scopes=args.service_account_scopes),
                resources=resources))
        result = apitools_client.pipelines.Run(request)
        log.status.Print('Running [{0}].'.format(result.name))
        return result
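
The --disk-size handling above splits a comma-separated list of NAME:SIZE_GB pairs before building Disk messages; here is that parsing step in isolation, with made-up values:

def _ParseDiskSizes(disk_size_flag):
  # Mirrors the loop above: 'datadisk:200,scratch:50' ->
  # [('datadisk', 200), ('scratch', 50)].
  disks = []
  for disk_encoding in disk_size_flag.split(','):
    name, size_gb = disk_encoding.split(':', 1)
    disks.append((name, int(size_gb)))
  return disks

assert _ParseDiskSizes('datadisk:200,scratch:50') == [('datadisk', 200), ('scratch', 50)]
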
def _RunCreate(compute_api,
               args,
               support_source_instance,
               support_network_tier=False):
  """Common routine for creating instance template.

  This is shared between various release tracks.

  Args:
      compute_api: The compute api.
      args: argparse.Namespace, An object that contains the values for the
          arguments specified in the .Args() method.
      support_source_instance: indicates whether source instance is supported.
      support_network_tier: Indicates whether network tier is supported or not.

  Returns:
      A resource object dispatched by display.Displayer().
  """
  _ValidateInstancesFlags(args)
  if support_network_tier:
    instances_flags.ValidateNetworkTierArgs(args)

  client = compute_api.client

  boot_disk_size_gb = utils.BytesToGb(args.boot_disk_size)
  utils.WarnIfDiskSizeIsTooSmall(boot_disk_size_gb, args.boot_disk_type)

  instance_template_ref = (
      Create.InstanceTemplateArg.ResolveAsResource(
          args, compute_api.resources))

  metadata = metadata_utils.ConstructMetadataMessage(
      client.messages,
      metadata=args.metadata,
      metadata_from_file=args.metadata_from_file)

  if hasattr(args, 'network_interface') and args.network_interface:
    network_interfaces = (
        instance_template_utils.CreateNetworkInterfaceMessages)(
            resources=compute_api.resources,
            scope_lister=flags.GetDefaultScopeLister(client),
            messages=client.messages,
            network_interface_arg=args.network_interface,
            region=args.region,
            support_network_tier=support_network_tier)
  else:
    network_tier = getattr(args, 'network_tier', None)
    network_interfaces = [
        instance_template_utils.CreateNetworkInterfaceMessage(
            resources=compute_api.resources,
            scope_lister=flags.GetDefaultScopeLister(client),
            messages=client.messages,
            network=args.network,
            region=args.region,
            subnet=args.subnet,
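            # Use an ephemeral external IP unless --no-address was given or an
            # explicit --address was supplied.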
            address=(instance_template_utils.EPHEMERAL_ADDRESS
                     if not args.no_address and not args.address
                     else args.address),
            network_tier=network_tier)
    ]

  scheduling = instance_utils.CreateSchedulingMessage(
      messages=client.messages,
      maintenance_policy=args.maintenance_policy,
      preemptible=args.preemptible,
      restart_on_failure=args.restart_on_failure)

  if args.no_service_account:
    service_account = None
  else:
    service_account = args.service_account
  service_accounts = instance_utils.CreateServiceAccountMessages(
      messages=client.messages,
      scopes=[] if args.no_scopes else args.scopes,
      service_account=service_account)

  create_boot_disk = not instance_utils.UseExistingBootDisk(args.disk or [])
  if create_boot_disk:
    image_expander = image_utils.ImageExpander(client, compute_api.resources)
    try:
      image_uri, _ = image_expander.ExpandImageFlag(
          user_project=instance_template_ref.project,
          image=args.image,
          image_family=args.image_family,
          image_project=args.image_project,
          return_image_resource=True)
    except utils.ImageNotFoundError as e:
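      # If --image-project was not given explicitly, expand the image flags
      # again (without fetching the resource) so the error below can show the
      # fully qualified image URI the user probably intended.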
      if args.IsSpecified('image_project'):
        raise e
      image_uri, _ = image_expander.ExpandImageFlag(
          user_project=instance_template_ref.project,
          image=args.image,
          image_family=args.image_family,
          image_project=args.image_project,
          return_image_resource=False)
      raise utils.ImageNotFoundError(
          'The resource [{}] was not found. Is the image located in another '
          'project? Use the --image-project flag to specify the '
          'project where the image is located.'.format(image_uri))
  else:
    image_uri = None

  if args.tags:
    tags = client.messages.Tags(items=args.tags)
  else:
    tags = None

  persistent_disks = (
      instance_template_utils.CreatePersistentAttachedDiskMessages(
          client.messages, args.disk or []))

  persistent_create_disks = (
      instance_template_utils.CreatePersistentCreateDiskMessages(
          client, compute_api.resources, instance_template_ref.project,
          getattr(args, 'create_disk', [])))

  if create_boot_disk:
    boot_disk_list = [
        instance_template_utils.CreateDefaultBootAttachedDiskMessage(
            messages=client.messages,
            disk_type=args.boot_disk_type,
            disk_device_name=args.boot_disk_device_name,
            disk_auto_delete=args.boot_disk_auto_delete,
            disk_size_gb=boot_disk_size_gb,
            image_uri=image_uri)]
  else:
    boot_disk_list = []

  local_ssds = []
  for x in args.local_ssd or []:
    local_ssd = instance_utils.CreateLocalSsdMessage(
        compute_api.resources,
        client.messages,
        x.get('device-name'),
        x.get('interface'),
        x.get('size'))
    local_ssds.append(local_ssd)

  disks = (
      boot_disk_list + persistent_disks + persistent_create_disks + local_ssds
  )

  machine_type = instance_utils.InterpretMachineType(
      machine_type=args.machine_type,
      custom_cpu=args.custom_cpu,
      custom_memory=args.custom_memory,
      ext=getattr(args, 'custom_extensions', None))

  guest_accelerators = (
      instance_template_utils.CreateAcceleratorConfigMessages(
          client.messages, getattr(args, 'accelerator', None)))

  instance_template = client.messages.InstanceTemplate(
      properties=client.messages.InstanceProperties(
          machineType=machine_type,
          disks=disks,
          canIpForward=args.can_ip_forward,
          metadata=metadata,
          minCpuPlatform=args.min_cpu_platform,
          networkInterfaces=network_interfaces,
          serviceAccounts=service_accounts,
          scheduling=scheduling,
          tags=tags,
          guestAccelerators=guest_accelerators,
      ),
      description=args.description,
      name=instance_template_ref.Name(),
  )

  request = client.messages.ComputeInstanceTemplatesInsertRequest(
      instanceTemplate=instance_template,
      project=instance_template_ref.project)

  # instance_template and request.instanceTemplate refer to the same message,
  # so the labels set here (and any source-instance fields added below) are
  # included in the insert request.
  request.instanceTemplate.properties.labels = labels_util.ParseCreateArgs(
      args, client.messages.InstanceProperties.LabelsValue)

  _AddSourceInstanceToTemplate(
      compute_api, args, instance_template, support_source_instance)

  return client.MakeRequests([(client.apitools_client.instanceTemplates,
                               'Insert', request)])
Example #14
def ParseCreateLabels(models_client, args):
  return labels_util.ParseCreateArgs(
      args, models_client.messages.GoogleCloudMlV1Model.LabelsValue)