Example #1
def sample_get_model():
    """Fetch a single Model resource by name and print it."""
    # One client instance can be reused for many requests.
    service = aiplatform_v1beta1.ModelServiceClient()

    # Populate the request message.
    get_request = aiplatform_v1beta1.GetModelRequest(name="name_value")

    # Issue the RPC and display the returned Model.
    model = service.get_model(request=get_request)
    print(model)
def sample_import_model_evaluation():
    """Import a model evaluation under a parent model and print it."""
    # One client instance can be reused for many requests.
    service = aiplatform_v1beta1.ModelServiceClient()

    # Populate the request message.
    import_request = aiplatform_v1beta1.ImportModelEvaluationRequest(
        parent="parent_value",
    )

    # Issue the RPC and display the imported evaluation.
    evaluation = service.import_model_evaluation(request=import_request)
    print(evaluation)
def sample_list_model_evaluations():
    """List the evaluations of a model, printing each one."""
    # One client instance can be reused for many requests.
    service = aiplatform_v1beta1.ModelServiceClient()

    # Populate the request message.
    list_request = aiplatform_v1beta1.ListModelEvaluationsRequest(
        parent="parent_value",
    )

    # The call returns a pager; iterating it fetches pages lazily.
    for evaluation in service.list_model_evaluations(request=list_request):
        print(evaluation)
Example #4
def sample_update_model():
    """Update a Model's mutable fields and print the updated resource."""
    # One client instance can be reused for many requests.
    service = aiplatform_v1beta1.ModelServiceClient()

    # The Model message carrying the new field values.
    updated = aiplatform_v1beta1.Model()
    updated.display_name = "display_name_value"

    update_request = aiplatform_v1beta1.UpdateModelRequest(model=updated)

    # Issue the RPC and display the server's view of the Model.
    result = service.update_model(request=update_request)
    print(result)
Example #5
def sample_delete_model():
    """Delete a Model and block until the long-running operation finishes."""
    # One client instance can be reused for many requests.
    service = aiplatform_v1beta1.ModelServiceClient()

    # Populate the request message.
    delete_request = aiplatform_v1beta1.DeleteModelRequest(name="name_value")

    # delete_model returns a long-running operation handle.
    lro = service.delete_model(request=delete_request)

    print("Waiting for operation to complete...")

    # Block until the server finishes the deletion, then show the outcome.
    outcome = lro.result()
    print(outcome)
Example #6
def sample_upload_model():
    """Upload a Model and block until the long-running operation finishes."""
    # One client instance can be reused for many requests.
    service = aiplatform_v1beta1.ModelServiceClient()

    # The Model resource to upload.
    new_model = aiplatform_v1beta1.Model()
    new_model.display_name = "display_name_value"

    upload_request = aiplatform_v1beta1.UploadModelRequest(
        parent="parent_value",
        model=new_model,
    )

    # upload_model returns a long-running operation handle.
    lro = service.upload_model(request=upload_request)

    print("Waiting for operation to complete...")

    # Block until the upload completes on the server, then show the result.
    outcome = lro.result()
    print(outcome)
Example #7
def export_model_tabular_classification_sample(
    project: str,
    model_id: str,
    gcs_destination_output_uri_prefix: str,
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
    timeout: int = 300,
):
    """Export a tabular-classification Model as a TF SavedModel to GCS.

    Starts the export long-running operation, prints its name and output
    info, then blocks up to ``timeout`` seconds for it to finish.
    """
    # The AI Platform services require regional API endpoints, so the
    # client is pointed at the endpoint for the chosen region. A single
    # client instance may be reused for multiple requests.
    service = aiplatform_v1beta1.ModelServiceClient(
        client_options={"api_endpoint": api_endpoint}
    )

    # Where and in which format to write the exported artifacts.
    export_config = {
        "artifact_destination": {
            "output_uri_prefix": gcs_destination_output_uri_prefix
        },
        "export_format_id": "tf-saved-model",
    }

    # Build the fully-qualified model resource name and start the export.
    model_name = service.model_path(
        project=project, location=location, model=model_id
    )
    lro = service.export_model(name=model_name, output_config=export_config)
    print("Long running operation:", lro.operation.name)
    print("output_info:", lro.metadata.output_info)

    # Block until the export completes (raises if it exceeds the timeout).
    export_model_response = lro.result(timeout=timeout)
    print("export_model_response:", export_model_response)
def upload_model_explain_tabular_managed_container_sample(
    project: str,
    display_name: str,
    container_spec_image_uri: str,
    artifact_uri: str,
    input_tensor_name: str,
    output_tensor_name: str,
    feature_names: list,
    location: str = "us-central1",
    api_endpoint: str = "us-central1-aiplatform.googleapis.com",
    timeout: int = 300,
):
    """Upload a custom tabular model with XRAI explanation metadata.

    Assembles the explanation spec (XRAI attribution over a single
    bag-of-features input tensor), wraps it into a Model message served by
    a custom container, starts the upload long-running operation, and
    blocks up to ``timeout`` seconds for it to finish.
    """
    # The AI Platform services require regional API endpoints. A single
    # client instance may be reused for multiple requests.
    service = aiplatform_v1beta1.ModelServiceClient(
        client_options={"api_endpoint": api_endpoint}
    )

    # The explainability method and its parameters: XRAI attribution
    # with a single integration step.
    explanation_parameters = aiplatform_v1beta1.ExplanationParameters(
        {"xrai_attribution": {"step_count": 1}}
    )

    # The input tensor for feature attribution. For a single-input model,
    # y = f(x), this is the serving input layer.
    feature_input = aiplatform_v1beta1.ExplanationMetadata.InputMetadata(
        {
            "input_tensor_name": input_tensor_name,
            # Input is tabular data
            "modality": "numeric",
            # Assign feature names to the inputs for explanation
            "encoding": "BAG_OF_FEATURES",
            "index_feature_mapping": feature_names,
        }
    )

    # The output tensor to explain. For a single-output model, y = f(x),
    # this is the serving output layer.
    prediction_output = aiplatform_v1beta1.ExplanationMetadata.OutputMetadata(
        {"output_tensor_name": output_tensor_name}
    )

    # Assemble the full explanation specification.
    explanation_spec = aiplatform_v1beta1.ExplanationSpec(
        parameters=explanation_parameters,
        metadata=aiplatform_v1beta1.ExplanationMetadata(
            inputs={"features": feature_input},
            outputs={"prediction": prediction_output},
        ),
    )

    # The Model message: custom serving container plus artifact location.
    model = aiplatform_v1beta1.Model(
        display_name=display_name,
        # The Cloud Storage location of the custom model
        artifact_uri=artifact_uri,
        explanation_spec=explanation_spec,
        container_spec={
            "image_uri": container_spec_image_uri,
            "command": [],
            "args": [],
        },
    )

    # Start the upload and wait for the long-running operation to finish.
    lro = service.upload_model(
        parent=f"projects/{project}/locations/{location}", model=model
    )
    print("Long running operation:", lro.operation.name)
    upload_model_response = lro.result(timeout=timeout)
    print("upload_model_response:", upload_model_response)