def Run(self, args):
  """Fetches an autoscaling policy and writes it as YAML to a file or stdout."""
  dataproc = dp.Dataproc(self.ReleaseTrack())
  policy_ref = args.CONCEPTS.autoscaling_policy.Parse()
  get_request = (
      dataproc.messages.DataprocProjectsRegionsAutoscalingPoliciesGetRequest(
          name=policy_ref.RelativeName()))
  policy = dataproc.client.projects_regions_autoscalingPolicies.Get(
      get_request)
  schema_path = export_util.GetSchemaPath(
      'dataproc', dataproc.api_version, 'AutoscalingPolicy', for_help=False)
  if not args.destination:
    # No destination given: print the policy YAML to stdout.
    export_util.Export(
        message=policy, stream=sys.stdout, schema_path=schema_path)
    return
  with files.FileWriter(args.destination) as out:
    export_util.Export(message=policy, stream=out, schema_path=schema_path)
def Run(self, args):
  """Instantiates a workflow template read inline from a YAML file.

  Args:
    args: parsed command-line arguments; reads args.file and args.async_.

  Returns:
    The completed workflow operation, or the in-flight operation when
    --async is set.
  """
  dataproc = dp.Dataproc(self.ReleaseTrack())
  msgs = dataproc.messages

  # Generate a uuid for the request so it is safe to retry.
  instance_id = uuid.uuid4().hex
  regions_ref = dp_util.ParseRegion(dataproc)

  # Read the template from the YAML file (or stdin) and validate it.
  # Use the API version of this dataproc instance rather than a hard-coded
  # 'v1beta2' so the schema always matches the message type being imported.
  data = console_io.ReadFromFileOrStdin(args.file or '-', binary=False)
  template = export_util.Import(
      message_type=msgs.WorkflowTemplate,
      stream=data,
      schema_path=export_util.GetSchemaPath(
          'dataproc',
          api_version=dataproc.api_version,
          message_name='WorkflowTemplate'))

  # Send the instantiate inline request.
  request = \
      msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateInlineRequest(
          instanceId=instance_id,
          parent=regions_ref.RelativeName(),
          workflowTemplate=template)
  operation = \
      dataproc.client.projects_regions_workflowTemplates.InstantiateInline(
          request)

  if args.async_:
    log.status.Print('Instantiating with operation [{0}].'.format(
        operation.name))
    return

  operation = dp_util.WaitForWorkflowTemplateOperation(dataproc, operation)
  return operation
def _GetSchemaPath(release_track, for_help=False):
  """Returns the TargetHttpProxy resource schema path for a release track."""
  api_version = _GetApiVersion(release_track)
  return export_util.GetSchemaPath(
      'compute', api_version, 'TargetHttpProxy', for_help=for_help)
def AddExportArgs(parser, verb, api_version, resource_message_name):
  """Adds the autoscaling-policy resource argument and export flags."""
  AddAutoscalingPolicyResourceArg(parser, verb, api_version)
  # The schema path shown in --help for the export destination flags.
  help_schema_path = export_util.GetSchemaPath(
      'dataproc', api_version, resource_message_name, for_help=True)
  export_util.AddExportFlags(parser, help_schema_path)
def ReadAutoscalingPolicy(dataproc, policy_id, policy_file_name=None):
  """Returns an autoscaling policy read from a YAML file.

  Validates it using the schema for the API version corresponding to the
  dataproc instance, and backfills necessary fields.

  Args:
    dataproc: wrapper for dataproc resources, client and messages.
    policy_id: The autoscaling policy id (last piece of the resource name).
    policy_file_name: if set, location of the YAML file to read from.
      Otherwise, reads from stdin.

  Returns:
    The parsed AutoscalingPolicy message, with id set to policy_id, name
    cleared, and duration fields normalized to integral seconds ('<n>s').

  Raises:
    argparse.ArgumentError if duration formats are invalid or out of bounds.
    exceptions.ValidationError if the YAML does not match the schema.
  """
  # Read template from YAML file, validate it using the schema for the
  # API version corresponding to the dataproc instance.
  data = console_io.ReadFromFileOrStdin(policy_file_name or '-', binary=False)
  schema_path = export_util.GetSchemaPath('dataproc', dataproc.api_version,
                                          'AutoscalingPolicy', for_help=False)

  try:
    policy = export_util.Import(
        message_type=dataproc.messages.AutoscalingPolicy,
        stream=data,
        schema_path=schema_path)
  except yaml_validator.ValidationError as e:
    raise exceptions.ValidationError(e.message)

  # Ignore user set id in the file (if any), and overwrite with the policy_ref
  # provided with this command.
  policy.id = policy_id

  # Similarly, ignore the set resource name. This field is OUTPUT_ONLY, so we
  # can just clear it.
  policy.name = None

  # Set duration fields to their seconds values. Guard against unset
  # basicAlgorithm/yarnConfig sub-messages (None in apitools messages) so a
  # sparse policy doesn't raise AttributeError here.
  if policy.basicAlgorithm is not None:
    if policy.basicAlgorithm.cooldownPeriod is not None:
      policy.basicAlgorithm.cooldownPeriod = str(
          arg_parsers.Duration(lower_bound='2m', upper_bound='1d')(
              policy.basicAlgorithm.cooldownPeriod)) + 's'
    yarn_config = policy.basicAlgorithm.yarnConfig
    if yarn_config is not None and (
        yarn_config.gracefulDecommissionTimeout is not None):
      yarn_config.gracefulDecommissionTimeout = str(
          arg_parsers.Duration(lower_bound='0s', upper_bound='1d')(
              yarn_config.gracefulDecommissionTimeout)) + 's'

  return policy
def GetSchemaPath(cls, for_help=False):
  """Returns the Cluster resource schema path."""
  api_version = cls.GetApiVersion()
  return export_util.GetSchemaPath(
      'dataproc', api_version, 'Cluster', for_help=for_help)
def GetSchemaPath(cls, for_help=False):
  """Returns the Instance resource schema path."""
  api_version = cls.GetApiVersion()
  return export_util.GetSchemaPath(
      'compute', api_version, 'Instance', for_help=for_help)
def GetSchemaPath(api_version, for_help=False):
  """Returns the WorkflowTemplate resource schema path."""
  message_name = 'WorkflowTemplate'
  return export_util.GetSchemaPath(
      'dataproc', api_version, message_name, for_help=for_help)
def GetSchemaPath():
  """Returns the SubstitutionRules file schema path."""
  # TODO(b/205222056): Move the substitution rule file schema to the v1
  # directory in the schema directory.
  return export_util.GetSchemaPath('gkebackup', 'v1alpha1',
                                   'SubstitutionRules')