def _get_gca_resource(
    self,
    resource_name: str,
    parent_resource_name_fields: Optional[Dict[str, str]] = None,
) -> proto.Message:
    """Fetches the GAPIC service representation of this resource.

    Args:
        resource_name (str):
            Required. A fully-qualified resource name or ID.
        parent_resource_name_fields (Dict[str, str]):
            Optional. Mapping of parent resource name key to values. These
            are used to compose the resource name when only a resource ID
            is given. Should not include project and location.
    """
    # Expand a bare resource ID into a fully-qualified name before fetching.
    full_name = utils.full_resource_name(
        resource_name=resource_name,
        resource_noun=self._resource_noun,
        parse_resource_name_method=self._parse_resource_name,
        format_resource_name_method=self._format_resource_name,
        project=self.project,
        location=self.location,
        parent_resource_name_fields=parent_resource_name_fields,
        resource_id_validator=self._resource_id_validator,
    )

    getter = getattr(self.api_client, self._getter_method)
    return getter(name=full_name, retry=_DEFAULT_RETRY)
def list(
    cls,
    tensorboard_name: str,
    filter: Optional[str] = None,
    order_by: Optional[str] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
) -> List["TensorboardExperiment"]:
    """Lists TensorboardExperiments in a Tensorboard resource.

    Example Usage:

        aiplatform.TensorboardExperiment.list(
            tensorboard_name='projects/my-project/locations/us-central1/tensorboards/123'
        )

    Args:
        tensorboard_name (str):
            Required. The resource name or resource ID of the Tensorboard
            whose TensorboardExperiments should be listed. Format, if
            resource name:
            'projects/{project}/locations/{location}/tensorboards/{tensorboard}'
        filter (str):
            Optional. An expression for filtering the results of the
            request. For field names both snake_case and camelCase are
            supported.
        order_by (str):
            Optional. A comma-separated list of fields to order by, sorted
            in ascending order. Use "desc" after a field name for
            descending. Supported fields: `display_name`, `create_time`,
            `update_time`.
        project (str):
            Optional. Project to retrieve list from. If not set, project
            set in aiplatform.init will be used.
        location (str):
            Optional. Location to retrieve list from. If not set, location
            set in aiplatform.init will be used.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials to use to retrieve list. Overrides
            credentials set in aiplatform.init.

    Returns:
        List[TensorboardExperiment] - A list of TensorboardExperiments
    """
    # The Tensorboard itself is the parent under which experiments live;
    # expand a bare ID into a fully-qualified name first.
    parent_resource = utils.full_resource_name(
        resource_name=tensorboard_name,
        resource_noun=Tensorboard._resource_noun,
        parse_resource_name_method=Tensorboard._parse_resource_name,
        format_resource_name_method=Tensorboard._format_resource_name,
        project=project,
        location=location,
    )

    return super()._list(
        parent=parent_resource,
        filter=filter,
        order_by=order_by,
        project=project,
        location=location,
        credentials=credentials,
    )
def __init__(
    self,
    resource_name: Optional[str] = None,
    resource: Optional[
        Union[gca_context.Context, gca_artifact.Artifact, gca_execution.Execution]
    ] = None,
    metadata_store_id: str = "default",
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
):
    """Retrieves an existing Metadata resource given a resource name or ID.

    Args:
        resource_name (str):
            A fully-qualified resource name or ID. Example:
            "projects/123/locations/us-central1/metadataStores/default/<resource_noun>/my-resource"
            or "my-resource" when project and location are initialized or
            passed. If ``resource`` is provided, this should not be set.
        resource (Union[gca_context.Context, gca_artifact.Artifact, gca_execution.Execution]):
            The proto.Message that contains the full information of the
            resource. If both are set, this field overrides the
            ``resource_name`` field.
        metadata_store_id (str):
            MetadataStore to retrieve the resource from. If not set,
            metadata_store_id is set to "default". If resource_name is a
            fully-qualified resource, its metadata_store_id overrides this
            one.
        project (str):
            Optional project to retrieve the resource from. If not set,
            project set in aiplatform.init will be used.
        location (str):
            Optional location to retrieve the resource from. If not set,
            location set in aiplatform.init will be used.
        credentials (auth_credentials.Credentials):
            Custom credentials to use to retrieve this resource. Overrides
            credentials set in aiplatform.init.
    """
    super().__init__(
        project=project,
        location=location,
        credentials=credentials,
    )
    if resource:
        # A full proto was supplied directly; no service lookup needed.
        self._gca_resource = resource
    else:
        # Compose the fully-qualified name, filling in the parent
        # metadata store when only a resource ID was given, then fetch
        # the resource from the service.
        full_resource_name = utils.full_resource_name(
            resource_name=resource_name,
            resource_noun=self._resource_noun,
            parse_resource_name_method=self._parse_resource_name,
            format_resource_name_method=self._format_resource_name,
            parent_resource_name_fields={
                metadata.metadata_store._MetadataStore._resource_noun: metadata_store_id
            },
            project=self.project,
            location=self.location,
        )
        self._gca_resource = getattr(self.api_client, self._getter_method)(
            name=full_resource_name, retry=base._DEFAULT_RETRY
        )
def _get_gca_resource(self, resource_name: str) -> proto.Message:
    """Returns GAPIC service representation of client class resource.

    Args:
        resource_name (str):
            Required. A fully-qualified resource name or ID.
    """
    # Fix: the Args section above was previously a second, free-standing
    # string literal (a no-op expression statement), so it never became
    # part of the method's __doc__. It is now merged into one docstring.
    resource_name = utils.full_resource_name(
        resource_name=resource_name,
        resource_noun=self._resource_noun,
        project=self.project,
        location=self.location,
    )
    return getattr(self.api_client, self._getter_method)(name=resource_name)
def create(
    cls,
    job_display_name: str,
    model_name: str,
    instances_format: str = "jsonl",
    predictions_format: str = "jsonl",
    gcs_source: Optional[Union[str, Sequence[str]]] = None,
    bigquery_source: Optional[str] = None,
    gcs_destination_prefix: Optional[str] = None,
    bigquery_destination_prefix: Optional[str] = None,
    model_parameters: Optional[Dict] = None,
    machine_type: Optional[str] = None,
    accelerator_type: Optional[str] = None,
    accelerator_count: Optional[int] = None,
    starting_replica_count: Optional[int] = None,
    max_replica_count: Optional[int] = None,
    generate_explanation: Optional[bool] = False,
    explanation_metadata: Optional[
        "aiplatform.explain.ExplanationMetadata"] = None,
    explanation_parameters: Optional[
        "aiplatform.explain.ExplanationParameters"] = None,
    labels: Optional[dict] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    encryption_spec_key_name: Optional[str] = None,
    sync: bool = True,
) -> "BatchPredictionJob":
    """Create a batch prediction job.

    Args:
        job_display_name (str):
            Required. The user-defined name of the BatchPredictionJob.
            The name can be up to 128 characters long and can consist
            of any UTF-8 characters.
        model_name (str):
            Required. A fully-qualified model resource name or model ID.
            Example: "projects/123/locations/us-central1/models/456" or
            "456" when project and location are initialized or passed.
        instances_format (str):
            Required. The format in which instances are given, must be one
            of "jsonl", "csv", "bigquery", "tf-record", "tf-record-gzip",
            or "file-list". Default is "jsonl" when using `gcs_source`. If
            a `bigquery_source` is provided, this is overridden to
            "bigquery".
        predictions_format (str):
            Required. The format in which AI Platform gives the
            predictions, must be one of "jsonl", "csv", or "bigquery".
            Default is "jsonl" when using `gcs_destination_prefix`. If a
            `bigquery_destination_prefix` is provided, this is overridden
            to "bigquery".
        gcs_source (Optional[Sequence[str]]):
            Google Cloud Storage URI(-s) to your instances to run batch
            prediction on. They must match `instances_format`. May contain
            wildcards. For more information on wildcards, see
            https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames.
        bigquery_source (Optional[str]):
            BigQuery URI to a table, up to 2000 characters long. For
            example: `projectId.bqDatasetId.bqTableId`
        gcs_destination_prefix (Optional[str]):
            The Google Cloud Storage location of the directory where the
            output is to be written to. In the given directory a new
            directory is created. Its name is
            ``prediction-<model-display-name>-<job-create-time>``, where
            the timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
            Inside of it files ``predictions_0001.<extension>``, ...,
            ``predictions_N.<extension>`` are created where ``<extension>``
            depends on the chosen ``predictions_format``. If any instances
            failed, ``errors_0001.<extension>``, ...,
            ``errors_N.<extension>`` files are created containing the
            failed instances plus an ``error`` field holding a
            ``google.rpc.Status`` with only ``code`` and ``message``.
        bigquery_destination_prefix (Optional[str]):
            The BigQuery project location where the output is to be
            written to. In the given project a new dataset is created with
            name ``prediction_<model-display-name>_<job-create-time>``
            (made BigQuery-dataset-name compatible), timestamp in
            YYYY_MM_DDThh_mm_ss_sssZ format. Two tables are created,
            ``predictions`` and ``errors``, with columns per the Model's
            instance and prediction schemata; ``errors`` rows carry a
            ``google.rpc.Status`` STRUCT with only ``code`` and
            ``message``.
        model_parameters (Optional[Dict]):
            The parameters that govern the predictions. The schema of the
            parameters may be specified via the Model's
            `parameters_schema_uri`.
        machine_type (Optional[str]):
            The type of machine for running batch prediction on dedicated
            resources. Not specifying machine type will result in the
            batch prediction job being run with automatic resources.
        accelerator_type (Optional[str]):
            The type of accelerator(s) that may be attached to the machine
            as per `accelerator_count`. Only used if `machine_type` is set.
        accelerator_count (Optional[int]):
            The number of accelerators to attach to the `machine_type`.
            Only used if `machine_type` is set.
        starting_replica_count (Optional[int]):
            The number of machine replicas used at the start of the batch
            operation. If not set, AI Platform decides the starting number,
            not greater than `max_replica_count`. Only used if
            `machine_type` is set.
        max_replica_count (Optional[int]):
            The maximum number of machine replicas the batch operation may
            be scaled to. Only used if `machine_type` is set. Default is 10.
        generate_explanation (bool):
            Optional. Generate explanation along with the batch prediction
            results. The output then includes explanations per the
            `predictions_format`: `bigquery` adds an `explanation` column;
            `jsonl` adds an `explanation` entry per line; `csv` is not
            supported.
        explanation_metadata (aiplatform.explain.ExplanationMetadata):
            Optional. Explanation metadata configuration for this
            BatchPredictionJob. Can be specified only if
            `generate_explanation` is set to `True`. Overrides
            `Model.explanation_metadata`; unpopulated fields are inherited
            from it.
        explanation_parameters (aiplatform.explain.ExplanationParameters):
            Optional. Parameters to configure explaining for the Model's
            predictions. Can be specified only if `generate_explanation`
            is set to `True`. Overrides `Model.explanation_parameters`;
            unpopulated fields are inherited from it.
        labels (Optional[dict]):
            The labels with user-defined metadata to organize your
            BatchPredictionJobs. Label keys and values can be no longer
            than 64 characters (Unicode codepoints), can only contain
            lowercase letters, numeric characters, underscores and dashes.
            International characters are allowed. See
            https://goo.gl/xmQnxf for more information and examples of
            labels.
        project (Optional[str]):
            Project to create this job in. Overrides project set in
            aiplatform.init.
        location (Optional[str]):
            Location to create this job in. Overrides location set in
            aiplatform.init.
        credentials (Optional[auth_credentials.Credentials]):
            Custom credentials to use to create this batch prediction job.
            Overrides credentials set in aiplatform.init.
        encryption_spec_key_name (Optional[str]):
            Optional. The Cloud KMS resource identifier of the customer
            managed encryption key used to protect the job. Has the form:
            ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``.
            The key needs to be in the same region as where the compute
            resource is created. If set, all resources created by the
            BatchPredictionJob will be encrypted with the provided key.
            Overrides encryption_spec_key_name set in aiplatform.init.
        sync (bool):
            Whether to execute this method synchronously. If False, this
            method will be executed in a concurrent Future and any
            downstream object will be immediately returned and synced when
            the Future has completed.

    Returns:
        (jobs.BatchPredictionJob):
            Instantiated representation of the created batch prediction
            job.

    Raises:
        ValueError: If both or neither of the source URIs (or destination
            prefixes) are provided, or if an unsupported instances or
            predictions format is given.
    """
    utils.validate_display_name(job_display_name)

    model_name = utils.full_resource_name(
        resource_name=model_name,
        resource_noun="models",
        project=project,
        location=location,
    )

    # Raise error if both or neither source URIs are provided
    if bool(gcs_source) == bool(bigquery_source):
        raise ValueError(
            "Please provide either a gcs_source or bigquery_source, "
            "but not both.")

    # Raise error if both or neither destination prefixes are provided
    if bool(gcs_destination_prefix) == bool(bigquery_destination_prefix):
        raise ValueError(
            "Please provide either a gcs_destination_prefix or "
            "bigquery_destination_prefix, but not both.")

    # Raise error if unsupported instance format is provided.
    # BUG FIX: this message previously interpolated `predictions_format`.
    if instances_format not in constants.BATCH_PREDICTION_INPUT_STORAGE_FORMATS:
        raise ValueError(
            f"{instances_format} is not an accepted instances format "
            f"type. Please choose from: {constants.BATCH_PREDICTION_INPUT_STORAGE_FORMATS}"
        )

    # Raise error if unsupported prediction format is provided
    if predictions_format not in constants.BATCH_PREDICTION_OUTPUT_STORAGE_FORMATS:
        raise ValueError(
            f"{predictions_format} is not an accepted prediction format "
            f"type. Please choose from: {constants.BATCH_PREDICTION_OUTPUT_STORAGE_FORMATS}"
        )

    # Explanations require the v1beta1 API surface; otherwise use the
    # default (compat) version.
    gca_bp_job = gca_bp_job_compat
    gca_io = gca_io_compat
    gca_machine_resources = gca_machine_resources_compat
    select_version = compat.DEFAULT_VERSION
    if generate_explanation:
        gca_bp_job = gca_bp_job_v1beta1
        gca_io = gca_io_v1beta1
        gca_machine_resources = gca_machine_resources_v1beta1
        select_version = compat.V1BETA1

    gapic_batch_prediction_job = gca_bp_job.BatchPredictionJob()

    # Required Fields
    gapic_batch_prediction_job.display_name = job_display_name
    gapic_batch_prediction_job.model = model_name

    input_config = gca_bp_job.BatchPredictionJob.InputConfig()
    output_config = gca_bp_job.BatchPredictionJob.OutputConfig()

    if bigquery_source:
        input_config.instances_format = "bigquery"
        input_config.bigquery_source = gca_io.BigQuerySource()
        input_config.bigquery_source.input_uri = bigquery_source
    else:
        input_config.instances_format = instances_format
        # FIX: accept any Sequence[str] per the annotation — previously
        # only `list` was recognized, so a tuple of URIs was wrapped as a
        # single (invalid) element.
        input_config.gcs_source = gca_io.GcsSource(
            uris=[gcs_source] if isinstance(gcs_source, str) else list(gcs_source)
        )

    if bigquery_destination_prefix:
        output_config.predictions_format = "bigquery"
        output_config.bigquery_destination = gca_io.BigQueryDestination()

        bq_dest_prefix = bigquery_destination_prefix

        # The service expects a "bq://" scheme on the destination URI.
        if not bq_dest_prefix.startswith("bq://"):
            bq_dest_prefix = f"bq://{bq_dest_prefix}"

        output_config.bigquery_destination.output_uri = bq_dest_prefix
    else:
        output_config.predictions_format = predictions_format
        output_config.gcs_destination = gca_io.GcsDestination(
            output_uri_prefix=gcs_destination_prefix)

    gapic_batch_prediction_job.input_config = input_config
    gapic_batch_prediction_job.output_config = output_config

    # Optional Fields
    gapic_batch_prediction_job.encryption_spec = (
        initializer.global_config.get_encryption_spec(
            encryption_spec_key_name=encryption_spec_key_name,
            select_version=select_version,
        )
    )

    if model_parameters:
        gapic_batch_prediction_job.model_parameters = model_parameters

    # Custom Compute
    if machine_type:
        machine_spec = gca_machine_resources.MachineSpec()
        machine_spec.machine_type = machine_type
        machine_spec.accelerator_type = accelerator_type
        machine_spec.accelerator_count = accelerator_count

        dedicated_resources = gca_machine_resources.BatchDedicatedResources()
        dedicated_resources.machine_spec = machine_spec
        dedicated_resources.starting_replica_count = starting_replica_count
        dedicated_resources.max_replica_count = max_replica_count

        gapic_batch_prediction_job.dedicated_resources = dedicated_resources
        gapic_batch_prediction_job.manual_batch_tuning_parameters = None

    # User Labels. FIX: guard against assigning None to the proto map
    # field when no labels were supplied.
    if labels:
        gapic_batch_prediction_job.labels = labels

    # Explanations
    if generate_explanation:
        gapic_batch_prediction_job.generate_explanation = generate_explanation
        if explanation_metadata or explanation_parameters:
            gapic_batch_prediction_job.explanation_spec = (
                gca_explanation_v1beta1.ExplanationSpec(
                    metadata=explanation_metadata,
                    parameters=explanation_parameters,
                )
            )

    # TODO (b/174502913): Support private feature once released

    api_client = cls._instantiate_client(location=location,
                                         credentials=credentials)

    return cls._create(
        api_client=api_client,
        parent=initializer.global_config.common_location_path(
            project=project, location=location),
        batch_prediction_job=gapic_batch_prediction_job,
        generate_explanation=generate_explanation,
        project=project or initializer.global_config.project,
        location=location or initializer.global_config.location,
        credentials=credentials or initializer.global_config.credentials,
        sync=sync,
    )
def create(
    cls,
    tensorboard_run_id: str,
    tensorboard_experiment_name: str,
    tensorboard_id: Optional[str] = None,
    display_name: Optional[str] = None,
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    request_metadata: Sequence[Tuple[str, str]] = (),
) -> "TensorboardRun":
    """Creates a new TensorboardRun in a TensorboardExperiment.

    Example Usage:

        tb_run = aiplatform.TensorboardRun.create(
            tensorboard_run_id='my-run',
            tensorboard_experiment_name='my-experiment',
            tensorboard_id='456',
            display_name='my display name',
            description='my description',
            labels={
                'key1': 'value1',
                'key2': 'value2'
            }
        )

    Args:
        tensorboard_run_id (str):
            Required. The ID to use for the Tensorboard run, which will
            become the final component of the Tensorboard run's resource
            name. This value should be 1-128 characters, and valid
            characters are /[a-z][0-9]-/.
        tensorboard_experiment_name (str):
            Required. The resource name or ID of the TensorboardExperiment
            to create the TensorboardRun in. Resource name format:
            ``projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}``
            If a resource ID is provided then tensorboard_id must be
            provided.
        tensorboard_id (str):
            Optional. The resource ID of the Tensorboard to create the
            TensorboardRun in. Used to compose the parent resource name
            when only an experiment ID is given.
        display_name (str):
            Optional. The user-defined name of the Tensorboard Run. This
            value must be unique among all TensorboardRuns belonging to
            the same parent TensorboardExperiment. If not provided,
            tensorboard_run_id will be used.
        description (str):
            Optional. Description of this Tensorboard Run.
        labels (Dict[str, str]):
            Optional. Labels with user-defined metadata to organize your
            Tensorboards. Label keys and values can be no longer than 64
            characters (Unicode codepoints), can only contain lowercase
            letters, numeric characters, underscores and dashes.
            International characters are allowed. No more than 64 user
            labels can be associated with one Tensorboard (System labels
            are excluded). See https://goo.gl/xmQnxf for more information
            and examples of labels. System reserved label keys are
            prefixed with "aiplatform.googleapis.com/" and are immutable.
        project (str):
            Optional. Project to create this resource in. Overrides
            project set in aiplatform.init.
        location (str):
            Optional. Location to create this resource in. Overrides
            location set in aiplatform.init.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials to use to create this resource.
            Overrides credentials set in aiplatform.init.
        request_metadata (Sequence[Tuple[str, str]]):
            Optional. Strings which should be sent along with the request
            as metadata.

    Returns:
        TensorboardRun: The created TensorboardRun resource.
    """
    if display_name:
        utils.validate_display_name(display_name)

    if labels:
        utils.validate_labels(labels)

    # Fall back to the run ID when no display name is given.
    display_name = display_name or tensorboard_run_id

    api_client = cls._instantiate_client(location=location,
                                         credentials=credentials)

    # Compose the parent experiment's fully-qualified name; tensorboard_id
    # fills in the tensorboard component when only an experiment ID was
    # passed.
    parent = utils.full_resource_name(
        resource_name=tensorboard_experiment_name,
        resource_noun=TensorboardExperiment._resource_noun,
        parse_resource_name_method=TensorboardExperiment.
        _parse_resource_name,
        format_resource_name_method=TensorboardExperiment.
        _format_resource_name,
        parent_resource_name_fields={
            Tensorboard._resource_noun: tensorboard_id
        },
        project=project,
        location=location,
    )

    gapic_tensorboard_run = gca_tensorboard_run.TensorboardRun(
        display_name=display_name,
        description=description,
        labels=labels,
    )

    _LOGGER.log_create_with_lro(cls)

    tensorboard_run = api_client.create_tensorboard_run(
        parent=parent,
        tensorboard_run=gapic_tensorboard_run,
        tensorboard_run_id=tensorboard_run_id,
        metadata=request_metadata,
    )

    _LOGGER.log_create_complete(cls, tensorboard_run, "tb_run")

    return cls(
        tensorboard_run_name=tensorboard_run.name,
        credentials=credentials,
    )
def create(
    cls,
    feature_id: str,
    value_type: str,
    entity_type_name: str,
    featurestore_id: Optional[str] = None,
    description: Optional[str] = None,
    labels: Optional[Dict[str, str]] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
    request_metadata: Optional[Sequence[Tuple[str, str]]] = (),
    sync: bool = True,
    create_request_timeout: Optional[float] = None,
) -> "Feature":
    """Creates a Feature resource in an EntityType.

    Example Usage:

        my_feature = aiplatform.Feature.create(
            feature_id='my_feature_id',
            value_type='INT64',
            entity_type_name='projects/123/locations/us-central1/featurestores/my_featurestore_id/\
            entityTypes/my_entity_type_id'
        )
        or
        my_feature = aiplatform.Feature.create(
            feature_id='my_feature_id',
            value_type='INT64',
            entity_type_name='my_entity_type_id',
            featurestore_id='my_featurestore_id',
        )

    Args:
        feature_id (str):
            Required. The ID to use for the Feature, which will become
            the final component of the Feature's resource name, which is
            immutable. This value may be up to 60 characters, and valid
            characters are ``[a-z0-9_]``. The first character cannot be a
            number. The value must be unique within an EntityType.
        value_type (str):
            Required. Immutable. Type of Feature value. One of BOOL,
            BOOL_ARRAY, DOUBLE, DOUBLE_ARRAY, INT64, INT64_ARRAY, STRING,
            STRING_ARRAY, BYTES.
        entity_type_name (str):
            Required. A fully-qualified entityType resource name or an
            entity_type ID of an existing entityType to create the Feature
            in. The EntityType must exist in the Featurestore if provided
            by the featurestore_id. Example:
            "projects/123/locations/us-central1/featurestores/my_featurestore_id/entityTypes/my_entity_type_id"
            or "my_entity_type_id" when project and location are
            initialized or passed, with featurestore_id passed.
        featurestore_id (str):
            Optional. Featurestore ID of an existing featurestore to
            create the Feature in, if `entity_type_name` is passed as an
            entity_type ID.
        description (str):
            Optional. Description of the Feature.
        labels (Dict[str, str]):
            Optional. The labels with user-defined metadata to organize
            your Features. Label keys and values can be no longer than 64
            characters (Unicode codepoints), can only contain lowercase
            letters, numeric characters, underscores and dashes.
            International characters are allowed. See
            https://goo.gl/xmQnxf for more information on and examples of
            labels. No more than 64 user labels can be associated with one
            Feature (System labels are excluded). System reserved label
            keys are prefixed with "aiplatform.googleapis.com/" and are
            immutable.
        project (str):
            Optional. Project to create Feature in if `entity_type_name`
            is passed an entity_type ID. If not set, project set in
            aiplatform.init will be used.
        location (str):
            Optional. Location to create Feature in if `entity_type_name`
            is passed an entity_type ID. If not set, location set in
            aiplatform.init will be used.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials to use to create Features.
            Overrides credentials set in aiplatform.init.
        request_metadata (Sequence[Tuple[str, str]]):
            Optional. Strings which should be sent along with the request
            as metadata.
        sync (bool):
            Optional. Whether to execute this creation synchronously. If
            False, this method will be executed in a concurrent Future and
            any downstream object will be immediately returned and synced
            when the Future has completed.
        create_request_timeout (float):
            Optional. The timeout for the create request in seconds.

    Returns:
        Feature - feature resource object
    """
    # Expand a bare entity_type ID into a fully-qualified name. When
    # featurestore_id is falsy, its (falsy) value is passed through so no
    # parent fields are composed — the name must then already be fully
    # qualified.
    entity_type_name = utils.full_resource_name(
        resource_name=entity_type_name,
        resource_noun=featurestore.EntityType._resource_noun,
        parse_resource_name_method=featurestore.EntityType.
        _parse_resource_name,
        format_resource_name_method=featurestore.EntityType.
        _format_resource_name,
        parent_resource_name_fields={
            featurestore.Featurestore._resource_noun: featurestore_id
        } if featurestore_id else featurestore_id,
        project=project,
        location=location,
        resource_id_validator=featurestore.EntityType.
        _resource_id_validator,
    )
    # Parse the location back out of the composed name so the client is
    # instantiated against the EntityType's own region.
    entity_type_name_components = featurestore.EntityType._parse_resource_name(
        entity_type_name)

    feature_config = featurestore_utils._FeatureConfig(
        feature_id=feature_id,
        value_type=value_type,
        description=description,
        labels=labels,
    )

    create_feature_request = feature_config.get_create_feature_request()
    create_feature_request.parent = entity_type_name

    api_client = cls._instantiate_client(
        location=entity_type_name_components["location"],
        credentials=credentials,
    )

    # create_feature returns a long-running operation; block on its
    # result before wrapping it in the SDK object.
    created_feature_lro = api_client.create_feature(
        request=create_feature_request,
        metadata=request_metadata,
        timeout=create_request_timeout,
    )

    _LOGGER.log_create_with_lro(cls, created_feature_lro)

    created_feature = created_feature_lro.result()

    _LOGGER.log_create_complete(cls, created_feature, "feature")

    feature_obj = cls(
        feature_name=created_feature.name,
        project=project,
        location=location,
        credentials=credentials,
    )

    return feature_obj
def list(
    cls,
    entity_type_name: str,
    featurestore_id: Optional[str] = None,
    filter: Optional[str] = None,
    order_by: Optional[str] = None,
    project: Optional[str] = None,
    location: Optional[str] = None,
    credentials: Optional[auth_credentials.Credentials] = None,
) -> List["Feature"]:
    """Lists existing managed feature resources in an entityType, given an
    entityType resource name or an entity_type ID.

    Example Usage:

        my_features = aiplatform.Feature.list(
            entity_type_name='projects/123/locations/us-central1/featurestores/my_featurestore_id/\
            entityTypes/my_entity_type_id'
        )
        or
        my_features = aiplatform.Feature.list(
            entity_type_name='my_entity_type_id',
            featurestore_id='my_featurestore_id',
        )

    Args:
        entity_type_name (str):
            Required. A fully-qualified entityType resource name or an
            entity_type ID of an existing entityType to list features in.
            The EntityType must exist in the Featurestore if provided by
            the featurestore_id. Example:
            "projects/123/locations/us-central1/featurestores/my_featurestore_id/entityTypes/my_entity_type_id"
            or "my_entity_type_id" when project and location are
            initialized or passed, with featurestore_id passed.
        featurestore_id (str):
            Optional. Featurestore ID of an existing featurestore to list
            features in, when entity_type_name is passed as an entity_type
            ID.
        filter (str):
            Optional. Lists the Features that match the filter expression.
            The following filters are supported:

            -  ``value_type``: Supports = and != comparisons.
            -  ``create_time``: Supports =, !=, <, >, >=, and <=
               comparisons. Values must be in RFC 3339 format.
            -  ``update_time``: Supports =, !=, <, >, >=, and <=
               comparisons. Values must be in RFC 3339 format.
            -  ``labels``: Supports key-value equality as well as key
               presence.

            Examples:

            -  ``value_type = DOUBLE`` --> Features whose type is DOUBLE.
            -  ``create_time > \"2020-01-31T15:30:00.000000Z\" OR update_time > \"2020-01-31T15:30:00.000000Z\"``
               --> EntityTypes created or updated after
               2020-01-31T15:30:00.000000Z.
            -  ``labels.active = yes AND labels.env = prod`` --> Features
               having both (active: yes) and (env: prod) labels.
            -  ``labels.env: *`` --> Any Feature which has a label with
               'env' as the key.
        order_by (str):
            Optional. A comma-separated list of fields to order by, sorted
            in ascending order. Use "desc" after a field name for
            descending. Supported fields:

            -  ``feature_id``
            -  ``value_type``
            -  ``create_time``
            -  ``update_time``
        project (str):
            Optional. Project to list features in. If not set, project
            set in aiplatform.init will be used.
        location (str):
            Optional. Location to list features in. If not set, location
            set in aiplatform.init will be used.
        credentials (auth_credentials.Credentials):
            Optional. Custom credentials to use to list features.
            Overrides credentials set in aiplatform.init.

    Returns:
        List[Feature] - A list of managed feature resource objects
    """
    return cls._list(
        filter=filter,
        order_by=order_by,
        project=project,
        location=location,
        credentials=credentials,
        # The parent EntityType name: when featurestore_id is falsy its
        # (falsy) value is passed through, so no parent fields are
        # composed and entity_type_name must already be fully qualified.
        parent=utils.full_resource_name(
            resource_name=entity_type_name,
            resource_noun=featurestore.EntityType._resource_noun,
            parse_resource_name_method=featurestore.EntityType.
            _parse_resource_name,
            format_resource_name_method=featurestore.EntityType.
            _format_resource_name,
            parent_resource_name_fields={
                featurestore.Featurestore._resource_noun: featurestore_id
            } if featurestore_id else featurestore_id,
            project=project,
            location=location,
            resource_id_validator=featurestore.EntityType.
            _resource_id_validator,
        ),
    )