Example no. 1
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
    cluster = export_util.Import(message_type=dataproc.messages.Cluster,
                                 stream=data)
    return clusters.CreateCluster(dataproc, cluster, args.async_,
                                  args.timeout)
Example no. 2
    def Run(self, args):
        """Runs the command.

    Args:
      args: argparse.Namespace, An object that contains the values for the
        arguments specified in the .Args() method.

    Returns:
      A response object returned by rpc call Validate.
    """
        project = properties.VALUES.core.project.GetOrFail()
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        client = holder.client

        # Import UrlMap to be verified
        data = console_io.ReadFromFileOrStdin(args.source, binary=False)
        try:
            url_map = export_util.Import(message_type=client.messages.UrlMap,
                                         stream=data,
                                         schema_path=_GetSchemaPath(
                                             self.ReleaseTrack()))
        except yaml_validator.ValidationError as e:
            raise exceptions.ToolException(str(e))

        # Send UrlMap.validate request
        if args.region is not None:
            return _SendRegionalRequest(client, project, args.region, url_map)
        return _SendGlobalRequest(client, project, url_map)
Example no. 3
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages

    # Generate uuid for request.
    instance_id = uuid.uuid4().hex
    regions_ref = dp_util.ParseRegion(dataproc)

    if args.file.startswith('gs://'):
      data = storage_helpers.ReadObject(args.file)
    else:
      data = console_io.ReadFromFileOrStdin(args.file, binary=False)
    template = export_util.Import(
        message_type=msgs.WorkflowTemplate, stream=data)

    # Send instantiate inline request.
    request = \
      msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
          parent=regions_ref.RelativeName(),
          workflowTemplate=template)

    request.requestId = instance_id

    operation = \
      dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
          request)
    if args.async_:
      log.status.Print('Instantiating with operation [{0}].'.format(
          operation.name))
      return
    operation = dp_util.WaitForWorkflowTemplateOperation(dataproc, operation)
    return operation
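Unlike the other examples, this one also accepts a gs:// path and routes it through storage_helpers. The branch generalizes into a small helper; a sketch under the assumption that storage_helpers comes from Dataproc's api_lib package, as its usage here suggests:

# Sketch of the read branch above; the storage_helpers import path is an
# assumption based on how this Dataproc command appears to use it.
from googlecloudsdk.api_lib.dataproc import storage_helpers
from googlecloudsdk.core.console import console_io


def ReadTemplateData(file_arg):
  """Reads template YAML from gs://, a local file, or '-' for stdin."""
  if file_arg.startswith('gs://'):
    # Cloud Storage objects are fetched through the storage helper.
    return storage_helpers.ReadObject(file_arg)
  # Anything else is a local path, or '-' for stdin.
  return console_io.ReadFromFileOrStdin(file_arg, binary=False)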
Example no. 4
def _Run(args, holder, target_https_proxy_arg, release_track):
    """Issues requests necessary to import target HTTPS proxies."""
    client = holder.client

    target_https_proxy_ref = target_https_proxy_arg.ResolveAsResource(
        args,
        holder.resources,
        default_scope=compute_scope.ScopeEnum.GLOBAL,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
        target_https_proxy = export_util.Import(
            message_type=client.messages.TargetHttpsProxy,
            stream=data,
            schema_path=_GetSchemaPath(release_track))
    except yaml_validator.ValidationError as e:
        raise exceptions.ToolException(str(e))

    # Get existing target HTTPS proxy.
    try:
        target_https_proxies_utils.SendGetRequest(client,
                                                  target_https_proxy_ref)
    except apitools_exceptions.HttpError as error:
        if error.status_code != 404:
            raise error
        # Target HTTPS proxy does not exist, create a new one.
        return _SendInsertRequest(client, target_https_proxy_ref,
                                  target_https_proxy)

    console_message = ('Target HTTPS Proxy [{0}] cannot be updated'.format(
        target_https_proxy_ref.Name()))
    raise NotImplementedError(console_message)
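Example no. 5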
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Generate uuid for request.
        instance_id = uuid.uuid4().hex
        regions_ref = dp_util.ParseRegion(dataproc)

        data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
        template = export_util.Import(message_type=msgs.WorkflowTemplate,
                                      stream=data)

        # Send instantiate inline request.
        request = \
          msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
              parent=regions_ref.RelativeName(),
              workflowTemplate=template)

        if dataproc.api_version == 'v1':
            # Deprecated field in v1beta2
            request.requestId = instance_id
        else:
            # new field not in v1
            request.instanceId = instance_id

        operation = \
          dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
              request)
        if args.async_:
            log.status.Print('Instantiating with operation [{0}].'.format(
                operation.name))
            return
        operation = dp_util.WaitForWorkflowTemplateOperation(
            dataproc, operation)
        return operation
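Example no. 6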
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        # Generate uuid for request.
        instance_id = uuid.uuid4().hex
        regions_ref = dp_util.ParseRegion(dataproc)
        # Read template from YAML file and validate it using a schema.
        data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
        template = export_util.Import(message_type=msgs.WorkflowTemplate,
                                      stream=data,
                                      schema_path=export_util.GetSchemaPath(
                                          'dataproc',
                                          api_version='v1beta2',
                                          message_name='WorkflowTemplate'))

        # Send instantiate inline request.
        request = \
          msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
              instanceId=instance_id,
              parent=regions_ref.RelativeName(),
              workflowTemplate=template)
        operation = \
          dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
              request)
        if args.async_:
            log.status.Print('Instantiating with operation [{0}].'.format(
                operation.name))
            return
        operation = dp_util.WaitForWorkflowTemplateOperation(
            dataproc, operation)
        return operation
Example no. 7
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    backend_service_ref = (
        flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.ResolveAsResource(
            args,
            holder.resources,
            scope_lister=compute_flags.GetDefaultScopeLister(client)))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
      backend_service = export_util.Import(
          message_type=client.messages.BackendService,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ToolException(str(e))

    # Get existing backend service.
    try:
      backend_service_old = backend_services_utils.SendGetRequest(
          client, backend_service_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Backend service does not exist, create a new one.
      return self.SendInsertRequest(client, backend_service_ref,
                                    backend_service)

    # No change, do not send requests to server.
    if backend_service_old == backend_service:
      return

    console_io.PromptContinue(
        message=('Backend Service [{0}] will be overwritten.').format(
            backend_service_ref.Name()),
        cancel_on_no=True)

    # populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    backend_service.id = backend_service_old.id
    backend_service.fingerprint = backend_service_old.fingerprint

    # Unspecified fields are assumed to be cleared.
    cleared_fields = []
    if backend_service.securitySettings is None:
      cleared_fields.append('securitySettings')
    if backend_service.localityLbPolicy is None:
      cleared_fields.append('localityLbPolicy')
    if backend_service.circuitBreakers is None:
      cleared_fields.append('circuitBreakers')
    if backend_service.consistentHash is None:
      cleared_fields.append('consistentHash')
    if backend_service.outlierDetection is None:
      cleared_fields.append('outlierDetection')

    with client.apitools_client.IncludeFields(cleared_fields):
      return self.SendPatchRequest(client, backend_service_ref, backend_service)
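A note on the cleared_fields bookkeeping: apitools omits None-valued fields when serializing a request, so a field simply absent from the imported YAML would be left untouched by the patch rather than cleared. IncludeFields forces the listed paths into the request body, which the server reads as an explicit clear. A minimal sketch of the pattern, with the patch sender passed in as a parameter to keep the sketch self-contained:

# Sketch of the cleared-fields pattern, assuming `client` is the apitools-
# backed compute client used above; 'securitySettings' is one example field.
def PatchWithClearedFields(client, ref, backend_service, send_patch):
  cleared_fields = []
  if backend_service.securitySettings is None:
    # Unset in the imported YAML: ask the server to clear it.
    cleared_fields.append('securitySettings')
  # apitools normally drops None fields from the request body; inside this
  # context the listed fields are serialized anyway, clearing them remotely.
  with client.apitools_client.IncludeFields(cleared_fields):
    return send_patch(client, ref, backend_service)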
Example no. 8
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    forwarding_rule_ref = self.FORWARDING_RULE_ARG.ResolveAsResource(
        args,
        holder.resources,
        scope_lister=compute_flags.GetDefaultScopeLister(holder.client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
      forwarding_rule = export_util.Import(
          message_type=client.messages.ForwardingRule,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ValidationError(str(e))

    # Get existing forwarding rule.
    try:
      forwarding_rule_old = utils.SendGetRequest(client, forwarding_rule_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Forwarding rule does not exist, create a new one.
      return self.SendInsertRequest(client, forwarding_rule_ref,
                                    forwarding_rule)

    # No change, do not send requests to server.
    if forwarding_rule_old == forwarding_rule:
      return

    console_io.PromptContinue(
        message=('Forwarding Rule [{0}] will be overwritten.').format(
            forwarding_rule_ref.Name()),
        cancel_on_no=True)

    # Populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    forwarding_rule.id = forwarding_rule_old.id
    forwarding_rule.fingerprint = forwarding_rule_old.fingerprint

    # Unspecified fields are assumed to be cleared.
    cleared_fields = []
    if not forwarding_rule.networkTier:
      cleared_fields.append('networkTier')
    if not forwarding_rule.allowGlobalAccess:
      cleared_fields.append('allowGlobalAccess')
    if self._support_source_ip_range and not forwarding_rule.sourceIpRanges:
      cleared_fields.append('sourceIpRanges')
    if not forwarding_rule.metadataFilters:
      cleared_fields.append('metadataFilters')

    with client.apitools_client.IncludeFields(cleared_fields):
      return self.SendPatchRequest(client, forwarding_rule_ref, forwarding_rule)
Example no. 9
def ReadSubstitutionRuleFile(file_arg):
    """Reads content of the substitution rule file specified in file_arg."""
    if not file_arg:
        return None
    data = console_io.ReadFromFileOrStdin(file_arg, binary=False)
    messages = api_util.GetMessagesModule()
    temp_restore_config = export_util.Import(
        message_type=messages.RestoreConfig,
        stream=data,
        schema_path=GetSchemaPath())
    return temp_restore_config.substitutionRules
Example no. 10
def ReadAutoscalingPolicy(dataproc, policy_id, policy_file_name=None):
    """Returns autoscaling policy read from YAML file.

    Validates it using the schema for the API version corresponding to the
    dataproc instance, and backfills necessary fields.

    Args:
      dataproc: wrapper for dataproc resources, client and messages.
      policy_id: The autoscaling policy id (last piece of the resource name).
      policy_file_name: if set, location of the YAML file to read from.
        Otherwise, reads from stdin.

    Raises:
      argparse.ArgumentError if duration formats are invalid or out of bounds.
    """
    # Read template from YAML file, validate it using the schema for the
    # API version corresponding to the dataproc instance.
    data = console_io.ReadFromFileOrStdin(policy_file_name or '-',
                                          binary=False)
    schema_path = export_util.GetSchemaPath('dataproc',
                                            dataproc.api_version,
                                            'AutoscalingPolicy',
                                            for_help=False)

    try:
        policy = export_util.Import(
            message_type=dataproc.messages.AutoscalingPolicy,
            stream=data,
            schema_path=schema_path)
    except yaml_validator.ValidationError as e:
        raise exceptions.ValidationError(str(e))

    # Ignore user set id in the file (if any), and overwrite with the policy_ref
    # provided with this command
    policy.id = policy_id

    # Similarly, ignore the set resource name. This field is OUTPUT_ONLY, so we
    # can just clear it.
    policy.name = None

    # Set duration fields to their seconds values
    if policy.basicAlgorithm.cooldownPeriod is not None:
        policy.basicAlgorithm.cooldownPeriod = str(
            arg_parsers.Duration(lower_bound='2m', upper_bound='1d')(
                policy.basicAlgorithm.cooldownPeriod)) + 's'
    if policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout is not None:
        policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout = str(
            arg_parsers.Duration(lower_bound='0s', upper_bound='1d')(
                policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout)
        ) + 's'

    return policy
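The Duration calls at the end convert human-readable durations into the 'Ns' string form stored by the API. As used here, arg_parsers.Duration builds a parser that returns the total number of seconds and rejects values outside the given bounds; a small illustration under that reading:

# Illustration of the duration normalization above; assumes the
# googlecloudsdk packages are importable.
from googlecloudsdk.calliope import arg_parsers

parse_cooldown = arg_parsers.Duration(lower_bound='2m', upper_bound='1d')
seconds = parse_cooldown('5m')   # total seconds as an integer: 300
cooldown = str(seconds) + 's'    # '300s', the serialized form used above
# Out-of-bounds values such as '30s' or '2d' raise an argument error.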
Example no. 11
    def testExportToFile(self):
        ssl_policy_ref = self.GetSslPolicyRef(self._resource_name)
        self.ExpectGetRequest(ssl_policy_ref, self._existing_ssl_policy)

        file_name = os.path.join(self.temp_path, 'export.yaml')

        self.RunExport('{0} --destination {1}'.format(self._resource_name,
                                                      file_name))

        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_ssl_policy = export_util.Import(
            message_type=self.messages.SslPolicy, stream=data)
        self.assertEqual(self._existing_ssl_policy, exported_ssl_policy)
Example no. 12
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        msgs = dataproc.messages

        data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
        cluster = export_util.Import(message_type=msgs.Cluster, stream=data)

        cluster_ref = args.CONCEPTS.cluster.Parse()
        cluster.clusterName = cluster_ref.clusterName
        cluster.projectId = cluster_ref.projectId

        # Import only supports create, not update (for now).
        return clusters.CreateCluster(dataproc, cluster_ref, cluster,
                                      args.async_, args.timeout)
Example no. 13
  def testExportToFile(self):
    self.make_requests.side_effect = iter([
        [test_resources.TARGET_GRPC_PROXIES_ALPHA[0]],
    ])

    file_name = os.path.join(self.temp_path, 'export.yaml')
    self.RunExport('{0} --destination {1}'.format(self._resource_name,
                                                  file_name))

    data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
    exported_target_grpc_proxy = export_util.Import(
        message_type=self.messages.TargetGrpcProxy, stream=data)
    self.assertEqual(self._existing_target_grpc_proxy,
                     exported_target_grpc_proxy)
Example no. 14
    def Run(self, args):
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        helper = ssl_policies_utils.SslPolicyHelper(holder)
        client = holder.client

        ssl_policy_ref = self.SSL_POLICY_ARG.ResolveAsResource(
            args,
            holder.resources,
            scope_lister=compute_flags.GetDefaultScopeLister(holder.client),
            default_scope=compute_scope.ScopeEnum.GLOBAL)

        data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

        try:
            ssl_policy = export_util.Import(
                message_type=client.messages.SslPolicy,
                stream=data,
                schema_path=self.GetSchemaPath())
        except yaml_validator.ValidationError as e:
            raise compute_exceptions.ValidationError(str(e))

        # Get existing SSL policy.
        try:
            ssl_policy_old = helper.Describe(ssl_policy_ref)
        except apitools_exceptions.HttpError as error:
            if error.status_code != 404:
                raise error
            # SSL policy does not exist, create a new one.
            operation_ref = helper.Create(ssl_policy_ref, ssl_policy)
            return helper.WaitForOperation(ssl_policy_ref, operation_ref,
                                           'Creating SSL policy')

        # No change, do not send requests to server.
        if ssl_policy_old == ssl_policy:
            return

        console_io.PromptContinue(
            message=('SSL Policy [{0}] will be overwritten.').format(
                ssl_policy_ref.Name()),
            cancel_on_no=True)

        # Populate id and fingerprint fields. These two fields are manually
        # removed from the schema files.
        ssl_policy.id = ssl_policy_old.id
        ssl_policy.fingerprint = ssl_policy_old.fingerprint

        operation_ref = helper.Patch(ssl_policy_ref, ssl_policy, False)
        return helper.WaitForOperation(ssl_policy_ref, operation_ref,
                                       'Updating SSL policy')
Example no. 15
    def testExportToFile(self):
        backend_service_ref = self.GetBackendServiceRef('my-backend-service',
                                                        region='alaska')
        self.ExpectGetRequest(backend_service_ref=backend_service_ref,
                              backend_service=self._backend_services[1])

        file_name = os.path.join(self.temp_path, 'export.yaml')

        self.RunExport('my-backend-service --region alaska'
                       ' --destination {0}'.format(file_name))

        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_backend_service = export_util.Import(
            message_type=self.messages.BackendService, stream=data)
        self.AssertMessagesEqual(self._backend_services[1],
                                 exported_backend_service)
Example no. 16
    def testExportToFile(self):
        forwarding_rule_ref = self.GetForwardingRuleRef(
            'global-forwarding-rule-1', region='alaska')
        self.ExpectGetRequest(forwarding_rule_ref=forwarding_rule_ref,
                              forwarding_rule=self._forwarding_rules[0])

        file_name = os.path.join(self.temp_path, 'export.yaml')

        self.RunExport('global-forwarding-rule-1 --region alaska'
                       ' --destination {0}'.format(file_name))

        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_forwarding_rule = export_util.Import(
            message_type=self.messages.ForwardingRule, stream=data)
        self.AssertMessagesEqual(self._forwarding_rules[0],
                                 exported_forwarding_rule)
Example no. 17
    def testExportToFile(self):
        # Regional urlmaps are only applicable for alpha and beta
        url_map_ref = self.GetUrlMapRef('url-map-1', region='alaska')
        url_map = self.MakeTestUrlMap(self.messages, self._api)

        self.ExpectGetRequest(url_map_ref=url_map_ref, url_map=url_map)

        file_name = os.path.join(self.temp_path, 'export.yaml')

        self.RunExport('url-map-1 --region alaska'
                       ' --destination {0}'.format(file_name))

        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_url_map = export_util.Import(
            message_type=self.messages.UrlMap, stream=data)
        self.AssertMessagesEqual(url_map, exported_url_map)
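Example no. 18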
  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    # Import the virtual machine instance configuration specification.
    schema_path = self.GetSchemaPath(for_help=False)
    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
    instance = export_util.Import(
        message_type=client.messages.Instance,
        stream=data,
        schema_path=schema_path)

    # Confirm imported instance has base64 fingerprint.
    if not instance.fingerprint:
      raise exceptions.InvalidUserInputError(
          '"{}" is missing the instance\'s base64 fingerprint field.'.format(
              args.source))

    # Retrieve specified instance reference.
    instance_ref = flags.INSTANCE_ARG.ResolveAsResource(
        args,
        holder.resources,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    # Process update-constraint args.
    most_disruptive_allowed_action = arg_utils.ChoiceToEnum(
        args.most_disruptive_allowed_action,
        client.messages.ComputeInstancesUpdateRequest
        .MostDisruptiveAllowedActionValueValuesEnum)
    minimal_action = arg_utils.ChoiceToEnum(
        args.minimal_action, client.messages.ComputeInstancesUpdateRequest
        .MinimalActionValueValuesEnum)

    # Prepare and send the update request.
    request = client.messages.ComputeInstancesUpdateRequest(
        instance=instance.name,
        project=instance_ref.project,
        zone=instance_ref.zone,
        instanceResource=instance,
        minimalAction=minimal_action,
        mostDisruptiveAllowedAction=most_disruptive_allowed_action)
    if self._support_secure_tag and args.clear_secure_tag:
      request.clearSecureTag = True

    client.MakeRequests([(client.apitools_client.instances, 'Update', request)])
    return
Example no. 19
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
    try:
      cluster = export_util.Import(
          message_type=msgs.Cluster,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ValidationError(str(e))

    cluster_ref = dp_util.ParseCluster(args.name, dataproc)
    cluster.clusterName = cluster_ref.clusterName
    cluster.projectId = cluster_ref.projectId

    # Import only supports create, not update (for now).
    return clusters.CreateCluster(dataproc, cluster, args.async_, args.timeout)
Example no. 20
def _Run(args, holder, url_map_arg, release_track):
    """Issues requests necessary to import URL maps."""
    client = holder.client

    url_map_ref = url_map_arg.ResolveAsResource(
        args,
        holder.resources,
        default_scope=compute_scope.ScopeEnum.GLOBAL,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
        url_map = export_util.Import(message_type=client.messages.UrlMap,
                                     stream=data,
                                     schema_path=_GetSchemaPath(release_track))
    except yaml_validator.ValidationError as e:
        raise exceptions.ToolException(str(e))

    # Get existing URL map.
    try:
        url_map_old = url_maps_utils.SendGetRequest(client, url_map_ref)
    except apitools_exceptions.HttpError as error:
        if error.status_code != 404:
            raise error
        # Url Map does not exist, create a new one.
        return _SendInsertRequest(client, url_map_ref, url_map)

    # No change, do not send requests to server.
    if url_map_old == url_map:
        return

    console_io.PromptContinue(
        message=('Url Map [{0}] will be overwritten.').format(
            url_map_ref.Name()),
        cancel_on_no=True)

    # Populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    url_map.id = url_map_old.id
    url_map.fingerprint = url_map_old.fingerprint

    return _SendPatchRequest(client, url_map_ref, url_map)
Example no. 21
    def Run(self, args):
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        client = holder.client

        target_grpc_proxy_ref = self.TARGET_GRPC_PROXY_ARG.ResolveAsResource(
            args,
            holder.resources,
            default_scope=compute_scope.ScopeEnum.GLOBAL)

        data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

        try:
            target_grpc_proxy = export_util.Import(
                message_type=client.messages.TargetGrpcProxy,
                stream=data,
                schema_path=self.GetSchemaPath())
        except yaml_validator.ValidationError as e:
            raise compute_exceptions.ValidationError(str(e))

        # Get existing target gRPC proxy.
        try:
            target_grpc_proxy_old = _Describe(holder, target_grpc_proxy_ref)
        except apitools_exceptions.HttpError as error:
            if error.status_code != 404:
                raise error
            # Target gRPC proxy does not exist, create a new one.
            return _Create(holder, target_grpc_proxy, target_grpc_proxy_ref)

        if target_grpc_proxy_old == target_grpc_proxy:
            return

        console_io.PromptContinue(
            message=('Target Grpc Proxy [{0}] will be overwritten.').format(
                target_grpc_proxy_ref.Name()),
            cancel_on_no=True)

        # Populate id and fingerprint fields. These two fields are manually
        # removed from the schema files.
        target_grpc_proxy.id = target_grpc_proxy_old.id
        target_grpc_proxy.fingerprint = target_grpc_proxy_old.fingerprint

        return _Patch(client, target_grpc_proxy_ref, target_grpc_proxy)
Example no. 22
    def testExportWorkflowTemplatesToFile(self):
        dataproc = dp.Dataproc(calliope.base.ReleaseTrack.GA)
        msgs = dataproc.messages
        workflow_template = self.MakeWorkflowTemplate(labels={'foo': 'bar'})

        # Expected output has template-specific info cleared.
        expected_output = copy.deepcopy(workflow_template)
        expected_output.id = None
        expected_output.name = None

        self.ExpectGetWorkflowTemplate(response=workflow_template)
        file_name = os.path.join(self.temp_path, 'template.yaml')
        result = self.RunDataproc(
            'workflow-templates export {0} --destination {1}'.format(
                self.WORKFLOW_TEMPLATE, file_name))
        self.assertIsNone(result)
        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_template = export_util.Import(
            message_type=msgs.WorkflowTemplate, stream=data)
        self.AssertMessagesEqual(expected_output, exported_template)
Example no. 23
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())
    msgs = dataproc.messages

    template_ref = args.CONCEPTS.template.Parse()
    # TODO(b/109837200) make the dataproc discovery doc parameters consistent
    # Parent() fails for the collection because of projectId/projectsId and
    # regionId/regionsId inconsistencies.
    # parent = template_ref.Parent().RelativePath()
    parent = '/'.join(template_ref.RelativeName().split('/')[0:4])

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
    try:
      template = export_util.Import(
          message_type=msgs.WorkflowTemplate,
          stream=data,
          schema_path=self.GetSchemaPath())
    except yaml_validator.ValidationError as e:
      raise exceptions.ValidationError(str(e))

    # Populate id field.
    template.id = template_ref.Name()

    try:
      old_template = dataproc.GetRegionsWorkflowTemplate(template_ref)
    except apitools_exceptions.HttpError as error:
      if error.status_code != 404:
        raise error
      # Template does not exist. Create a new one.
      request = msgs.DataprocProjectsRegionsWorkflowTemplatesCreateRequest(
          parent=parent, workflowTemplate=template)
      return dataproc.client.projects_regions_workflowTemplates.Create(request)
    # Update the existing template.
    console_io.PromptContinue(
        message=('Workflow template [{0}] will be overwritten.').format(
            template.id),
        cancel_on_no=True)
    # Populate version field and name field.
    template.version = old_template.version
    template.name = template_ref.RelativeName()
    return dataproc.client.projects_regions_workflowTemplates.Update(template)
Example no. 24
    def _testExportClustersToFile(self, expected_region, region_flag=''):
        dataproc = dp.Dataproc(self.track)
        msgs = dataproc.messages
        cluster = self.MakeCluster()

        # Expected output has cluster-specific info cleared.
        expected_output = copy.deepcopy(cluster)
        expected_output.clusterName = None
        expected_output.projectId = None

        self.ExpectGetCluster(cluster, region=expected_region)

        file_name = os.path.join(self.temp_path, 'cluster.yaml')
        result = self.RunDataproc(
            'clusters export {0} --destination {1} {2}'.format(
                self.CLUSTER_NAME, file_name, region_flag))
        self.assertIsNone(result)
        data = console_io.ReadFromFileOrStdin(file_name or '-', binary=False)
        exported_template = export_util.Import(message_type=msgs.Cluster,
                                               stream=data)
        self.AssertMessagesEqual(expected_output, exported_template)
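Example no. 25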
    def Run(self, args):
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        client = holder.client

        url_map_ref = self.URL_MAP_ARG.ResolveAsResource(
            args,
            holder.resources,
            scope_lister=compute_flags.GetDefaultScopeLister(client))

        data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

        try:
            url_map = export_util.Import(message_type=client.messages.UrlMap,
                                         stream=data,
                                         schema_path=self.GetSchemaPath())
        except yaml_validator.ValidationError as e:
            raise exceptions.ToolException(str(e))

        # Get existing URL map.
        get_request = url_maps_utils.ComposeGetRequest(client, url_map_ref)
        url_map_old = client.MakeRequests([get_request])[0]

        # No change, do not send requests to server.
        if url_map_old == url_map:
            return

        console_io.PromptContinue(
            message=('Url Map [{0}] will be overwritten.').format(
                url_map_ref.Name()),
            cancel_on_no=True)

        # Populate id and fingerprint fields. These two fields are manually
        # removed from the schema files.
        url_map.id = url_map_old.id
        url_map.fingerprint = url_map_old.fingerprint

        patch_request = self.ComposePatchRequest(client, url_map_ref, url_map)

        return client.MakeRequests([patch_request])
Example no. 26
def ReadAutoscalingPolicy(dataproc, policy_id, policy_file_name=None):
    """Returns autoscaling policy read from YAML file.

    Args:
      dataproc: wrapper for dataproc resources, client and messages.
      policy_id: The autoscaling policy id (last piece of the resource name).
      policy_file_name: if set, location of the YAML file to read from.
        Otherwise, reads from stdin.

    Raises:
      argparse.ArgumentError if duration formats are invalid or out of bounds.
    """
    data = console_io.ReadFromFileOrStdin(policy_file_name or '-',
                                          binary=False)
    policy = export_util.Import(
        message_type=dataproc.messages.AutoscalingPolicy, stream=data)

    # Ignore user set id in the file (if any), and overwrite with the policy_ref
    # provided with this command
    policy.id = policy_id

    # Similarly, ignore the set resource name. This field is OUTPUT_ONLY, so we
    # can just clear it.
    policy.name = None

    # Set duration fields to their seconds values
    if policy.basicAlgorithm is not None:
        if policy.basicAlgorithm.cooldownPeriod is not None:
            policy.basicAlgorithm.cooldownPeriod = str(
                arg_parsers.Duration(lower_bound='2m', upper_bound='1d')(
                    policy.basicAlgorithm.cooldownPeriod)) + 's'
        if policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout is not None:
            policy.basicAlgorithm.yarnConfig.gracefulDecommissionTimeout = str(
                arg_parsers.Duration(lower_bound='0s', upper_bound='1d')(
                    policy.basicAlgorithm.yarnConfig.
                    gracefulDecommissionTimeout)) + 's'

    return policy
Example no. 27
    def testExportAutoscalingPolicies_destinationFile(self):
        mocked_response = self.MakeAutoscalingPolicy('fake-project',
                                                     'antarctica-north42',
                                                     'policy-1')
        self.mock_client.projects_regions_autoscalingPolicies.Get.Expect(
            self.messages.DataprocProjectsRegionsAutoscalingPoliciesGetRequest(
                name=
                'projects/fake-project/regions/antarctica-north42/autoscalingPolicies/policy-1'
            ),
            response=mocked_response)

        # Export clears id/name, since they cannot be set in import
        expected_policy = copy.deepcopy(mocked_response)
        expected_policy.id = None
        expected_policy.name = None

        file_name = os.path.join(self.temp_path, 'template.yaml')
        self.RunDataproc(
            'autoscaling-policies export policy-1 --destination {}'.format(
                file_name))
        contents = console_io.ReadFromFileOrStdin(file_name, binary=False)
        exported_message = export_util.Import(
            message_type=self.messages.AutoscalingPolicy, stream=contents)
        self.AssertMessagesEqual(expected_policy, exported_message)
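Example no. 28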
def _Run(args, holder, target_https_proxy_arg, release_track):
    """Issues requests necessary to import target HTTPS proxies."""
    client = holder.client
    resources = holder.resources

    target_https_proxy_ref = target_https_proxy_arg.ResolveAsResource(
        args,
        holder.resources,
        default_scope=compute_scope.ScopeEnum.GLOBAL,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
        target_https_proxy = export_util.Import(
            message_type=client.messages.TargetHttpsProxy,
            stream=data,
            schema_path=_GetSchemaPath(release_track))
    except yaml_validator.ValidationError as e:
        raise compute_exceptions.ValidationError(str(e))

    # Get existing target HTTPS proxy.
    try:
        old_target_https_proxy = target_https_proxies_utils.SendGetRequest(
            client, target_https_proxy_ref)
    except apitools_exceptions.HttpError as error:
        if error.status_code != 404:
            raise error
        # Target HTTPS proxy does not exist, create a new one.
        return _SendInsertRequest(client, resources, target_https_proxy_ref,
                                  target_https_proxy)

    if old_target_https_proxy == target_https_proxy:
        return

    console_io.PromptContinue(
        message=('Target Https Proxy [{0}] will be overwritten.').format(
            target_https_proxy_ref.Name()),
        cancel_on_no=True)

    # Populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    target_https_proxy.id = old_target_https_proxy.id

    if hasattr(old_target_https_proxy, 'fingerprint'):
        target_https_proxy.fingerprint = old_target_https_proxy.fingerprint

    # Unspecified fields are assumed to be cleared.
    cleared_fields = []
    if target_https_proxy.description is None:
        cleared_fields.append('description')
    if target_https_proxy.serverTlsPolicy is None:
        cleared_fields.append('serverTlsPolicy')
    if target_https_proxy.authorizationPolicy is None:
        cleared_fields.append('authorizationPolicy')
    if hasattr(target_https_proxy,
               'certificateMap') and target_https_proxy.certificateMap is None:
        cleared_fields.append('certificateMap')
    if hasattr(target_https_proxy,
               'httpFilters') and not target_https_proxy.httpFilters:
        cleared_fields.append('httpFilters')
    if target_https_proxy.proxyBind is None:
        cleared_fields.append('proxyBind')
    if target_https_proxy.quicOverride is None:
        cleared_fields.append('quicOverride')
    if not target_https_proxy.sslCertificates:
        cleared_fields.append('sslCertificates')
    if target_https_proxy.sslPolicy is None:
        cleared_fields.append('sslPolicy')
    if target_https_proxy.urlMap is None:
        cleared_fields.append('urlMap')

    with client.apitools_client.IncludeFields(cleared_fields):
        return _SendPatchRequest(client, resources, target_https_proxy_ref,
                                 target_https_proxy)
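Example no. 29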
def _Run(args, holder, target_http_proxy_arg, release_track):
    """Issues requests necessary to import target HTTP proxies."""
    client = holder.client
    resources = holder.resources

    target_http_proxy_ref = target_http_proxy_arg.ResolveAsResource(
        args,
        holder.resources,
        default_scope=compute_scope.ScopeEnum.GLOBAL,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)

    try:
        target_http_proxy = export_util.Import(
            message_type=client.messages.TargetHttpProxy,
            stream=data,
            schema_path=_GetSchemaPath(release_track))
    except yaml_validator.ValidationError as e:
        raise compute_exceptions.ValidationError(str(e))

    # Get existing target HTTP proxy.
    try:
        target_http_proxy_old = target_http_proxies_utils.SendGetRequest(
            client, target_http_proxy_ref)
    except apitools_exceptions.HttpError as error:
        if error.status_code != 404:
            raise error
        # Target HTTP proxy does not exist, create a new one.
        return _SendInsertRequest(client, resources, target_http_proxy_ref,
                                  target_http_proxy)

    if target_http_proxy_old == target_http_proxy:
        return

    console_io.PromptContinue(
        message=('Target Http Proxy [{0}] will be overwritten.').format(
            target_http_proxy_ref.Name()),
        cancel_on_no=True)

    # Populate id and fingerprint fields. These two fields are manually
    # removed from the schema files.
    target_http_proxy.id = target_http_proxy_old.id
    target_http_proxy.fingerprint = target_http_proxy_old.fingerprint

    # Unspecified fields are assumed to be cleared.
    cleared_fields = []
    if target_http_proxy.description is None:
        cleared_fields.append('description')

    # The REST API will reject requests without the UrlMap. However, we want to
    # avoid doing partial validations in the client and rely on server side
    # behavior.
    if target_http_proxy.urlMap is None:
        cleared_fields.append('urlMap')
    if release_track != base.ReleaseTrack.GA:
        if target_http_proxy.proxyBind is None:
            cleared_fields.append('proxyBind')

    with client.apitools_client.IncludeFields(cleared_fields):
        return _SendPatchRequest(client, resources, target_http_proxy_ref,
                                 target_http_proxy)
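Example no. 30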
  def ImportResourceMessage(self, yaml_file, message_name):
    """Import a messages class instance typed by name from a YAML file."""
    data = console_io.ReadFromFileOrStdin(yaml_file, binary=False)
    message_type = self.GetMessage(message_name)
    return export_util.Import(message_type=message_type, stream=data)
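Taken together, the examples share one skeleton: read YAML with console_io.ReadFromFileOrStdin (where '-' selects stdin), parse it into a generated message with export_util.Import, and optionally validate it first by passing schema_path. A minimal sketch of that skeleton, assuming the googlecloudsdk packages are importable and borrowing Dataproc's Cluster message as the target type:

# Minimal sketch of the shared import pattern; `dataproc` stands in for any
# API wrapper exposing a generated `messages` module, as in the examples.
from googlecloudsdk.command_lib.export import util as export_util
from googlecloudsdk.core.console import console_io


def ImportClusterFromYaml(dataproc, source=None):
  """Parses a Cluster message from a YAML file, or from stdin if unset."""
  data = console_io.ReadFromFileOrStdin(source or '-', binary=False)
  # Passing schema_path=... here would validate the YAML first and raise
  # yaml_validator.ValidationError on mismatch, as several examples do.
  return export_util.Import(
      message_type=dataproc.messages.Cluster, stream=data)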