def automl_create_dataset_for_nlp(
  gcp_project_id: str,
  gcp_region: str,
  dataset_display_name: str,
  api_endpoint: str = None,
) -> NamedTuple('Outputs', [('dataset_path', str), ('dataset_status', str), ('dataset_id', str)]):
  """Creates an AutoML NL text-classification (MULTICLASS) dataset.

  If a dataset with the given display name already exists and is empty, it is
  reused instead of creating a new one.

  Args:
    gcp_project_id: GCP project id that owns the dataset.
    gcp_region: GCP compute region, e.g. 'us-central1'.
    dataset_display_name: Display name for the dataset.
    api_endpoint: Optional override for the AutoML API endpoint.

  Returns:
    Tuple of (dataset resource path, status string, dataset id).

  Raises:
    google.api_core.exceptions.GoogleAPICallError: if dataset creation fails.
  """

  import sys
  import subprocess
  # Install pinned client libraries before importing them (KFP component style).
  subprocess.run([sys.executable, '-m', 'pip', 'install', 'googleapis-common-protos==1.6.0',
      '--no-warn-script-location'],
      env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True)
  subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.9.0',
      '--quiet', '--no-warn-script-location'],
      env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True)

  import google
  import logging
  from google.api_core.client_options import ClientOptions
  from google.cloud import automl

  logging.getLogger().setLevel(logging.INFO)  # TODO: make level configurable

  if api_endpoint:
    client_options = ClientOptions(api_endpoint=api_endpoint)
    client = automl.AutoMlClient(client_options=client_options)
  else:
    client = automl.AutoMlClient()

  status = 'created'
  project_location = client.location_path(gcp_project_id, gcp_region)
  # Check whether a dataset with this display name already exists.
  for element in client.list_datasets(project_location):
    if element.display_name == dataset_display_name:
      status = 'created but existed'
      if element.example_count == 0:
        # Reuse the existing empty dataset instead of creating a duplicate.
        status = 'existed but empty'
        return (element.name, status, element.name.rsplit('/', 1)[-1])
  try:
    metadata = automl.types.TextClassificationDatasetMetadata(classification_type=automl.enums.ClassificationType.MULTICLASS)
    dataset = automl.types.Dataset(display_name=dataset_display_name, text_classification_dataset_metadata=metadata,)
    # Create a dataset with the given display name
    response = client.create_dataset(project_location, dataset)
    created_dataset = response.result()
    # Log info about the created dataset.  BUG FIX: log the server-returned
    # `created_dataset`, not the local request proto `dataset` — fields such
    # as example_count and create_time are only populated on the response.
    logging.info("Dataset name: {}".format(created_dataset.name))
    logging.info("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
    logging.info("Dataset display name: {}".format(created_dataset.display_name))
    logging.info("Dataset example count: {}".format(created_dataset.example_count))
    logging.info("Dataset create time:")
    logging.info("\tseconds: {}".format(created_dataset.create_time.seconds))
    logging.info("\tnanos: {}".format(created_dataset.create_time.nanos))

    dataset_id = created_dataset.name.rsplit('/', 1)[-1]
    return (created_dataset.name, status, dataset_id)
  except google.api_core.exceptions.GoogleAPICallError as e:
    logging.warning(e)
    raise e
Ejemplo n.º 2
0
def automl_deploy_nlp_model(
    gcp_project_id: str,
    gcp_region: str,
    model_display_name: str,
    model_id,
    api_endpoint: str = None,
) -> NamedTuple('Outputs', [('model_display_name', str), ('model_id', str),
                            ('status', str)]):
    """Deploys an AutoML NL model and reports the resulting status string."""
    import subprocess
    import sys

    # Install the pinned client libraries before importing them.
    for extra_args in (
            ['googleapis-common-protos==1.6.0', '--no-warn-script-location'],
            ['google-cloud-automl==0.9.0', '--quiet',
             '--no-warn-script-location']):
        subprocess.run([sys.executable, '-m', 'pip', 'install'] + extra_args,
                       env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'},
                       check=True)

    import google
    import logging
    from google.api_core.client_options import ClientOptions
    from google.api_core import exceptions
    from google.cloud import automl
    from google.cloud.automl import enums

    logging.getLogger().setLevel(logging.INFO)  # TODO: make level configurable

    if api_endpoint:
        client = automl.AutoMlClient(
            client_options=ClientOptions(api_endpoint=api_endpoint))
    else:
        client = automl.AutoMlClient()

    try:
        logging.info('Deploying model {}'.format(model_display_name))
        model_full_id = client.model_path(gcp_project_id, gcp_region, model_id)
        operation = client.deploy_model(model_full_id)
        # Block until the long-running deploy operation completes.
        logging.info("Model deployed. {}".format(operation.result()))
        status = 'deployed'
    except exceptions.NotFound as e:
        logging.warning(e)
        status = 'not_found'
    except Exception as e:
        logging.warning(e)
        status = 'undeployed'
    logging.info('Model status: {}'.format(status))

    return (model_display_name, model_id, status)
def create_model(project_id, dataset_id, display_name):
    """Create a model."""
    # [START automl_vision_classification_create_model]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # dataset_id = "YOUR_DATASET_ID"
    # display_name = "your_models_display_name"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, "us-central1")
    # Leave model unset to use the default base model provided by Google
    new_model = automl.types.Model(
        display_name=display_name,
        dataset_id=dataset_id,
        image_classification_model_metadata=automl.types.ImageClassificationModelMetadata(
            train_budget_milli_node_hours=24000),
    )

    # create_model starts training and returns a long-running operation.
    operation = client.create_model(parent, new_model)

    print("Training operation name: {}".format(operation.operation.name))
    print("Training started...")
Ejemplo n.º 4
0
def create_dataset(project_id, display_name):
    """Create a dataset."""
    # [START automl_vision_object_detection_create_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # display_name = "your_datasets_display_name"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = f"projects/{project_id}/locations/us-central1"
    new_dataset = automl.Dataset(
        display_name=display_name,
        image_object_detection_dataset_metadata=automl.ImageObjectDetectionDatasetMetadata(),
    )

    # create_dataset returns a long-running operation; wait for it.
    operation = client.create_dataset(parent=parent, dataset=new_dataset)
    created_dataset = operation.result()

    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
Ejemplo n.º 5
0
def automl_split_dataset_table_column_names(
    dataset_path: str,
    target_column_name: str,
    table_index: int = 0,
) -> NamedTuple('Outputs', [('target_column_path', str), ('feature_column_paths', list)]):
    """Splits an AutoML Tables dataset's columns into target and features."""
    import subprocess
    import sys
    subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True)

    import json
    from google.cloud import automl

    client = automl.AutoMlClient()
    table_specs = list(client.list_table_specs(dataset_path))
    print('table_specs=')
    print(table_specs)
    selected_table_name = table_specs[table_index].name

    column_specs = list(client.list_column_specs(selected_table_name))
    print('column_specs=')
    print(column_specs)

    # Partition the columns by display name into target vs. features.
    target_specs = [c for c in column_specs
                    if c.display_name == target_column_name]
    feature_column_names = [c.name for c in column_specs
                            if c.display_name != target_column_name]

    return (target_specs[0].name, json.dumps(feature_column_names))
Ejemplo n.º 6
0
def create_dataset(project_id, display_name):
    """Create a dataset."""
    # [START automl_language_sentiment_analysis_create_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # display_name = "YOUR_DATASET_NAME"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, "us-central1")

    # Each dataset requires a sentiment score with a defined sentiment_max
    # value, for more information on TextSentimentDatasetMetadata, see:
    # https://cloud.google.com/natural-language/automl/docs/prepare#sentiment-analysis
    # https://cloud.google.com/automl/docs/reference/rpc/google.cloud.automl.v1#textsentimentdatasetmetadata
    sentiment_metadata = automl.types.TextSentimentDatasetMetadata(
        sentiment_max=4)  # Possible max sentiment score: 1-10

    new_dataset = automl.types.Dataset(
        display_name=display_name,
        text_sentiment_dataset_metadata=sentiment_metadata,
    )

    # create_dataset returns a long-running operation; wait for it.
    operation = client.create_dataset(parent, new_dataset)
    created_dataset = operation.result()

    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
Ejemplo n.º 7
0
def get_dataset(project_id, dataset_id):
    """Get a dataset.

    Fetches the dataset and prints its common fields plus every
    product-specific metadata field.  The [START]/[END] markers delimit the
    snippet regions published per product in the cloud documentation, so
    their placement and nesting must be preserved exactly.
    """
    # [START automl_language_entity_extraction_get_dataset]
    # [START automl_language_sentiment_analysis_get_dataset]
    # [START automl_language_text_classification_get_dataset]
    # [START automl_translate_get_dataset]
    # [START automl_vision_classification_get_dataset]
    # [START automl_vision_object_detection_get_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # dataset_id = "YOUR_DATASET_ID"

    client = automl.AutoMlClient()
    # Get the full path of the dataset
    dataset_full_id = client.dataset_path(project_id, "us-central1",
                                          dataset_id)
    dataset = client.get_dataset(name=dataset_full_id)

    # Display the dataset information
    print("Dataset name: {}".format(dataset.name))
    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
    print("Dataset display name: {}".format(dataset.display_name))
    print("Dataset create time: {}".format(dataset.create_time))
    # [END automl_language_sentiment_analysis_get_dataset]
    # [END automl_language_text_classification_get_dataset]
    # [END automl_translate_get_dataset]
    # [END automl_vision_classification_get_dataset]
    # [END automl_vision_object_detection_get_dataset]
    print("Text extraction dataset metadata: {}".format(
        dataset.text_extraction_dataset_metadata))
    # [END automl_language_entity_extraction_get_dataset]

    # [START automl_language_sentiment_analysis_get_dataset]
    print("Text sentiment dataset metadata: {}".format(
        dataset.text_sentiment_dataset_metadata))
    # [END automl_language_sentiment_analysis_get_dataset]

    # [START automl_language_text_classification_get_dataset]
    print("Text classification dataset metadata: {}".format(
        dataset.text_classification_dataset_metadata))
    # [END automl_language_text_classification_get_dataset]

    # [START automl_translate_get_dataset]
    print("Translation dataset metadata:")
    print("\tsource_language_code: {}".format(
        dataset.translation_dataset_metadata.source_language_code))
    print("\ttarget_language_code: {}".format(
        dataset.translation_dataset_metadata.target_language_code))
    # [END automl_translate_get_dataset]

    # [START automl_vision_classification_get_dataset]
    print("Image classification dataset metadata: {}".format(
        dataset.image_classification_dataset_metadata))
    # [END automl_vision_classification_get_dataset]

    # [START automl_vision_object_detection_get_dataset]
    print("Image object detection dataset metadata: {}".format(
        dataset.image_object_detection_dataset_metadata))
    # NOTE(review): this region was never closed — closing tag added.
    # [END automl_vision_object_detection_get_dataset]
def create_dataset(project_id, display_name):
    """Create a dataset."""
    # [START automl_vision_classification_create_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # display_name = "your_datasets_display_name"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, "us-central1")
    # Specify the classification type
    # Types:
    # MultiLabel: Multiple labels are allowed for one example.
    # MultiClass: At most one label is allowed per example.
    new_dataset = automl.types.Dataset(
        display_name=display_name,
        image_classification_dataset_metadata=automl.types.ImageClassificationDatasetMetadata(
            classification_type=automl.enums.ClassificationType.MULTILABEL),
    )

    # create_dataset returns a long-running operation; wait for it.
    operation = client.create_dataset(parent, new_dataset)
    created_dataset = operation.result()

    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
def create_model(project_id, dataset_id, display_name):
    """Create a model."""
    # [START automl_language_entity_extraction_create_model]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # dataset_id = "YOUR_DATASET_ID"
    # display_name = "YOUR_MODEL_NAME"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = f"projects/{project_id}/locations/us-central1"
    # Leave model unset to use the default base model provided by Google
    new_model = automl.Model(
        display_name=display_name,
        dataset_id=dataset_id,
        text_extraction_model_metadata=automl.TextExtractionModelMetadata(),
    )

    # create_model starts training and returns a long-running operation.
    operation = client.create_model(parent=parent, model=new_model)

    print("Training operation name: {}".format(operation.operation.name))
    print("Training started...")
Ejemplo n.º 10
0
def automl_import_data_from_bigquery(
    dataset_path,
    input_uri: str,
    retry=None, #=google.api_core.gapic_v1.method.DEFAULT,
    timeout=None, #=google.api_core.gapic_v1.method.DEFAULT,
    metadata: dict = None,
) -> NamedTuple('Outputs', [('dataset_path', str)]):
    """Imports rows from a BigQuery table into an AutoML dataset.

    Args:
        dataset_path: Full resource name of the target AutoML dataset.
        input_uri: BigQuery source URI, e.g. 'bq://project.dataset.table'.
        retry: Optional retry policy; falls back to the API default.
        timeout: Optional timeout; falls back to the API default.
        metadata: Optional additional request metadata.

    Returns:
        Single-element tuple containing the dataset path.
    """
    import sys
    import subprocess
    subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True)

    import google
    from google.cloud import automl
    client = automl.AutoMlClient()
    input_config = {
        'bigquery_source': {
            'input_uri': input_uri,
        },
    }
    response = client.import_data(
        dataset_path,
        input_config,
        retry or google.api_core.gapic_v1.method.DEFAULT,
        timeout or google.api_core.gapic_v1.method.DEFAULT,
        metadata,
    )
    result = response.result()  # synchronous wait for the import operation
    print(result)
    # Renamed from `metadata` to avoid shadowing the function parameter.
    operation_metadata = response.metadata
    print(operation_metadata)
    # BUG FIX: the original `return (dataset_path)` is just a parenthesized
    # string, not the 1-tuple the declared NamedTuple output requires.
    return (dataset_path,)
Ejemplo n.º 11
0
def list_models(project_id):
    """List models."""
    # [START automl_list_models]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"

    client = automl.AutoMlClient()
    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, "us-central1")
    models = client.list_models(parent, "")

    print("List of models:")
    for model in models:
        # Translate the deployment-state enum into a human-readable label.
        deployment_state = (
            "deployed"
            if model.deployment_state ==
            automl.enums.Model.DeploymentState.DEPLOYED
            else "undeployed")

        print("Model name: {}".format(model.name))
        print("Model id: {}".format(model.name.split("/")[-1]))
        print("Model display name: {}".format(model.display_name))
        print("Model create time:")
        print("\tseconds: {}".format(model.create_time.seconds))
        print("\tnanos: {}".format(model.create_time.nanos))
        print("Model deployment state: {}".format(deployment_state))
Ejemplo n.º 12
0
def automl_deploy_model(
    model_path: str,
) -> NamedTuple('Outputs', [
    ('model_path', str),
]):
    """Deploys a trained model.

    Args:
        model_path: The resource name of the model to export. Format: 'projects/<project>/locations/<location>/models/<model>'

    Annotations:
        author: Alexey Volkov <*****@*****.**>
    """
    from google.cloud import automl

    client = automl.AutoMlClient()
    operation = client.deploy_model(name=model_path)
    print('Operation started:')
    print(operation.operation)
    # Block until the long-running deployment finishes.
    operation.result()
    print('Operation finished:')
    print(operation.metadata)
    return (model_path, )
def create_dataset(project_id, display_name):
    """Create a dataset."""
    # [START automl_language_entity_extraction_create_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # display_name = "YOUR_DATASET_NAME"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, "us-central1")
    new_dataset = automl.types.Dataset(
        display_name=display_name,
        text_extraction_dataset_metadata=automl.types.TextExtractionDatasetMetadata(),
    )

    # create_dataset returns a long-running operation; wait for it.
    operation = client.create_dataset(parent, new_dataset)
    created_dataset = operation.result()

    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
Ejemplo n.º 14
0
def create_model(project_id,
                 compute_region,
                 dataset_id,
                 model_name,
                 train_budget=24):
    """Create a model."""

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, compute_region)

    # Model spec for the image dataset; an empty metadata dict means
    # "use the service defaults" when no train budget is supplied.
    model_spec = {
        "display_name": model_name,
        "dataset_id": dataset_id,
        "image_classification_model_metadata": (
            {"train_budget": train_budget} if train_budget else {}
        ),
    }

    # Create a model with the model metadata in the region.
    operation = client.create_model(parent, model_spec)
    opname = operation.operation.name
    print("Training operation name: {}".format(opname))
    print("Training started, waiting for result...")
    # do synchronous wait in this case. TODO: Are there timeouts/deadlines?
    result = operation.result()
    print('result:')
    print(result)
    return opname, result
Ejemplo n.º 15
0
def list_models(project_id):
    """List models."""
    # [START automl_list_models]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"

    client = automl.AutoMlClient()
    # A resource that represents Google Cloud Platform location.
    parent = f"projects/{project_id}/locations/us-central1"

    response = client.list_models(
        request=automl.ListModelsRequest(parent=parent, filter=""))

    print("List of models:")
    for model in response:
        # Translate the deployment-state enum into a human-readable label.
        deployment_state = (
            "deployed"
            if model.deployment_state == automl.Model.DeploymentState.DEPLOYED
            else "undeployed")

        print("Model name: {}".format(model.name))
        print("Model id: {}".format(model.name.split("/")[-1]))
        print("Model display name: {}".format(model.display_name))
        print("Model create time: {}".format(model.create_time))
        print("Model deployment state: {}".format(deployment_state))
def create_dataset(project_id, display_name):
    """Create a dataset."""
    # [START automl_language_text_classification_create_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # display_name = "YOUR_DATASET_NAME"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = f"projects/{project_id}/locations/us-central1"
    # Specify the classification type
    # Types:
    # MultiLabel: Multiple labels are allowed for one example.
    # MultiClass: At most one label is allowed per example.
    new_dataset = automl.Dataset(
        display_name=display_name,
        text_classification_dataset_metadata=automl.TextClassificationDatasetMetadata(
            classification_type=automl.ClassificationType.MULTICLASS),
    )

    # create_dataset returns a long-running operation; wait for it.
    operation = client.create_dataset(parent=parent, dataset=new_dataset)
    created_dataset = operation.result()

    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
Ejemplo n.º 17
0
def get_model(project_id, model_id):
    """Get a model."""
    # [START automl_get_model]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # model_id = "YOUR_MODEL_ID"

    client = automl.AutoMlClient()
    # Build the full resource path of the model.
    full_model_name = client.model_path(project_id, "us-central1", model_id)
    model = client.get_model(name=full_model_name)

    # Retrieve deployment state.
    deployment_state = (
        "deployed"
        if model.deployment_state == automl.Model.DeploymentState.DEPLOYED
        else "undeployed")

    # Display the model information.
    print("Model name: {}".format(model.name))
    print("Model id: {}".format(model.name.split("/")[-1]))
    print("Model display name: {}".format(model.display_name))
    print("Model create time: {}".format(model.create_time))
    print("Model deployment state: {}".format(deployment_state))
Ejemplo n.º 18
0
def create_dataset(project_id, display_name):
    """Create a dataset."""
    # [START automl_translate_create_dataset]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # display_name = "YOUR_DATASET_NAME"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = f"projects/{project_id}/locations/us-central1"
    # For a list of supported languages, see:
    # https://cloud.google.com/translate/automl/docs/languages
    new_dataset = automl.Dataset(
        display_name=display_name,
        translation_dataset_metadata=automl.TranslationDatasetMetadata(
            source_language_code="en", target_language_code="ja"),
    )

    # create_dataset returns a long-running operation; wait for it.
    operation = client.create_dataset(parent=parent, dataset=new_dataset)
    created_dataset = operation.result()

    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
def create_model(project_id, dataset_id, display_name):
    """Create a model."""
    # [START automl_vision_classification_create_model]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # dataset_id = "YOUR_DATASET_ID"
    # display_name = "your_models_display_name"

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = f"projects/{project_id}/locations/us-central1"
    # Leave model unset to use the default base model provided by Google
    # train_budget_milli_node_hours: The actual train_cost will be equal or
    # less than this value.
    # https://cloud.google.com/automl/docs/reference/rpc/google.cloud.automl.v1#imageclassificationmodelmetadata
    new_model = automl.Model(
        display_name=display_name,
        dataset_id=dataset_id,
        image_classification_model_metadata=automl.ImageClassificationModelMetadata(
            train_budget_milli_node_hours=24000),
    )

    # create_model starts training and returns a long-running operation.
    response = client.create_model(parent=parent, model=new_model)

    print("Training operation name: {}".format(response.operation.name))
    print("Training started...")
    # [END automl_vision_classification_create_model]
    return response
def get_model_evaluation(project_id, model_id, model_evaluation_id):
    """Get model evaluation.

    Prints the common evaluation fields plus every product-specific metrics
    field.  The [START]/[END] markers delimit the snippet regions published
    per product in the cloud documentation; their placement must be
    preserved exactly.
    """
    # [START automl_language_entity_extraction_get_model_evaluation]
    # [START automl_language_sentiment_analysis_get_model_evaluation]
    # [START automl_language_text_classification_get_model_evaluation]
    # [START automl_translate_get_model_evaluation]
    # [START automl_vision_classification_get_model_evaluation]
    # [START automl_vision_object_detection_get_model_evaluation]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # model_id = "YOUR_MODEL_ID"
    # model_evaluation_id = "YOUR_MODEL_EVALUATION_ID"

    client = automl.AutoMlClient()
    # Get the full path of the model evaluation.
    model_evaluation_full_id = client.model_evaluation_path(
        project_id, "us-central1", model_id, model_evaluation_id)

    # Get complete detail of the model evaluation.
    response = client.get_model_evaluation(model_evaluation_full_id)

    print("Model evaluation name: {}".format(response.name))
    print("Model annotation spec id: {}".format(response.annotation_spec_id))
    print("Create Time:")
    print("\tseconds: {}".format(response.create_time.seconds))
    print("\tnanos: {}".format(response.create_time.nanos / 1e9))
    print("Evaluation example count: {}".format(
        response.evaluated_example_count))
    # [END automl_language_sentiment_analysis_get_model_evaluation]
    # [END automl_language_text_classification_get_model_evaluation]
    # [END automl_translate_get_model_evaluation]
    # [END automl_vision_classification_get_model_evaluation]
    # [END automl_vision_object_detection_get_model_evaluation]
    print("Entity extraction model evaluation metrics: {}".format(
        response.text_extraction_evaluation_metrics))
    # [END automl_language_entity_extraction_get_model_evaluation]

    # [START automl_language_sentiment_analysis_get_model_evaluation]
    print("Sentiment analysis model evaluation metrics: {}".format(
        response.text_sentiment_evaluation_metrics))
    # [END automl_language_sentiment_analysis_get_model_evaluation]

    # [START automl_language_text_classification_get_model_evaluation]
    # [START automl_vision_classification_get_model_evaluation]
    print("Classification model evaluation metrics: {}".format(
        response.classification_evaluation_metrics))
    # [END automl_language_text_classification_get_model_evaluation]
    # [END automl_vision_classification_get_model_evaluation]

    # [START automl_translate_get_model_evaluation]
    print("Translation model evaluation metrics: {}".format(
        response.translation_evaluation_metrics))
    # [END automl_translate_get_model_evaluation]

    # [START automl_vision_object_detection_get_model_evaluation]
    print("Object detection model evaluation metrics: {}".format(
        response.image_object_detection_evaluation_metrics))
    # NOTE(review): this region was never closed — closing tag added.
    # [END automl_vision_object_detection_get_model_evaluation]
Ejemplo n.º 21
0
def operation_id():
    """Yields the name of the first long-running operation in the project."""
    client = automl.AutoMlClient()
    parent = f"projects/{PROJECT_ID}/locations/us-central1"

    # Grab the first operation from the first page of results.
    pages = client._transport.operations_client.list_operations(
        parent, filter_="").pages
    first_page = next(pages)
    yield first_page.next().name
def verify_model_state():
    """Ensures the test model is deployed, deploying it if necessary."""
    client = automl.AutoMlClient()
    model_full_id = client.model_path(PROJECT_ID, "us-central1", MODEL_ID)

    state = client.get_model(model_full_id).deployment_state
    if state == automl.enums.Model.DeploymentState.UNDEPLOYED:
        # Deploy model if it is not deployed, and wait for completion.
        client.deploy_model(model_full_id).result()
Ejemplo n.º 23
0
def operation_id():
    """Yields the name of the first long-running operation in the project."""
    client = automl.AutoMlClient()
    parent = client.location_path(PROJECT_ID, "us-central1")
    # Grab the first operation from the first page of results.
    pages = client.transport._operations_client.list_operations(
        parent, filter_=""
    ).pages
    first_page = next(pages)
    yield first_page.next().name
def list_model_evaluations(project_id, model_id):
    """List model evaluations.

    Prints the common fields plus every product-specific metrics field for
    each evaluation of the model.  The [START]/[END] markers delimit the
    snippet regions published per product in the cloud documentation; their
    placement must be preserved exactly.
    """
    # [START automl_language_entity_extraction_list_model_evaluations]
    # [START automl_language_sentiment_analysis_list_model_evaluations]
    # [START automl_language_text_classification_list_model_evaluations]
    # [START automl_translate_list_model_evaluations]
    # [START automl_vision_classification_list_model_evaluations]
    # [START automl_vision_object_detection_list_model_evaluations]
    from google.cloud import automl

    # TODO(developer): Uncomment and set the following variables
    # project_id = "YOUR_PROJECT_ID"
    # model_id = "YOUR_MODEL_ID"

    client = automl.AutoMlClient()
    # Get the full path of the model.
    model_full_id = client.model_path(project_id, "us-central1", model_id)

    print("List of model evaluations:")
    for evaluation in client.list_model_evaluations(model_full_id, ""):
        print("Model evaluation name: {}".format(evaluation.name))
        print("Model annotation spec id: {}".format(
            evaluation.annotation_spec_id))
        print("Create Time:")
        print("\tseconds: {}".format(evaluation.create_time.seconds))
        print("\tnanos: {}".format(evaluation.create_time.nanos / 1e9))
        print("Evaluation example count: {}".format(
            evaluation.evaluated_example_count))
        # [END automl_language_sentiment_analysis_list_model_evaluations]
        # [END automl_language_text_classification_list_model_evaluations]
        # [END automl_translate_list_model_evaluations]
        # [END automl_vision_classification_list_model_evaluations]
        # [END automl_vision_object_detection_list_model_evaluations]
        print("Entity extraction model evaluation metrics: {}".format(
            evaluation.text_extraction_evaluation_metrics))
        # [END automl_language_entity_extraction_list_model_evaluations]

        # [START automl_language_sentiment_analysis_list_model_evaluations]
        print("Sentiment analysis model evaluation metrics: {}".format(
            evaluation.text_sentiment_evaluation_metrics))
        # [END automl_language_sentiment_analysis_list_model_evaluations]

        # [START automl_language_text_classification_list_model_evaluations]
        # [START automl_vision_classification_list_model_evaluations]
        print("Classification model evaluation metrics: {}".format(
            evaluation.classification_evaluation_metrics))
        # [END automl_language_text_classification_list_model_evaluations]
        # [END automl_vision_classification_list_model_evaluations]

        # [START automl_translate_list_model_evaluations]
        print("Translation model evaluation metrics: {}".format(
            evaluation.translation_evaluation_metrics))
        # [END automl_translate_list_model_evaluations]

        # [START automl_vision_object_detection_list_model_evaluations]
        print("Object detection model evaluation metrics: {}\n\n".format(
            evaluation.image_object_detection_evaluation_metrics))
        # NOTE(review): this region was never closed — closing tag added.
        # [END automl_vision_object_detection_list_model_evaluations]
Ejemplo n.º 25
0
 def test_default_credentials_automl_client(self):
     """AutoMlClient should pick up Kaggle kernel credentials by default.

     NOTE(review): assumes `init_automl` patches the client to inject
     KaggleKernelCredentials when the CLOUDAI integration is enabled —
     confirm against the kaggle_gcp module under test.
     """
     env = EnvironmentVarGuard()
     env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
     env.set('KAGGLE_KERNEL_INTEGRATIONS', 'CLOUDAI')
     with env:
         init_automl()
         automl_client = automl.AutoMlClient()
         self.assertIsNotNone(automl_client.credentials)
         self.assertIsInstance(automl_client.credentials, KaggleKernelCredentials)
         self.assertTrue(automl_client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
Ejemplo n.º 26
0
 def test_user_provided_credentials(self):
     """Explicitly supplied credentials must not be replaced by Kaggle ones."""
     credentials = _make_credentials()
     env = EnvironmentVarGuard()
     env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
     env.set('KAGGLE_KERNEL_INTEGRATIONS', 'CLOUDAI')
     with env:
         init_automl()
         client = automl.AutoMlClient(credentials=credentials)
         self.assertNotIsInstance(client.credentials, KaggleKernelCredentials)
         self.assertIsNotNone(client.credentials)
def test_vision_object_detection_create_model(capsys):
    """create_model should start training; the LRO is cancelled afterwards
    so the test does not leave a (billable) training job running."""
    vision_object_detection_create_model.create_model(
        PROJECT_ID, DATASET_ID, "object_test_create_model")
    out, _ = capsys.readouterr()
    assert "Training started" in out

    # Cancel the operation
    # (its name is parsed out of the sample's printed output).
    operation_id = out.split("Training operation name: ")[1].split("\n")[0]
    client = automl.AutoMlClient()
    client.transport._operations_client.cancel_operation(operation_id)
def get_evaluation_id():
    """Yields the id of the first evaluation of the test model."""
    client = automl.AutoMlClient()
    model_full_id = client.model_path(PROJECT_ID, "us-central1", MODEL_ID)
    # Take the first evaluation in the listing (None if there are none,
    # which makes the split below fail just like the original loop would).
    evaluation = next(
        iter(client.list_model_evaluations(model_full_id, "")), None)
    model_evaluation_id = evaluation.name.split(
        "{}/modelEvaluations/".format(MODEL_ID))[1].split("\n")[0]
    yield model_evaluation_id
def setup():
    # Verify the model is deployed before trying to predict
    client = automl.AutoMlClient()
    model_full_id = client.model_path(PROJECT_ID, "us-central1", MODEL_ID)

    deployment_state = client.get_model(name=model_full_id).deployment_state
    if deployment_state == automl.Model.DeploymentState.UNDEPLOYED:
        # Deploy model if it is not deployed, and wait for completion.
        client.deploy_model(name=model_full_id).result()
Ejemplo n.º 30
0
def create_dataset():
    """Creates a throwaway sentiment dataset and yields its id."""
    client = automl.AutoMlClient()
    parent = client.location_path(PROJECT_ID, "us-central1")
    # Timestamped display name so repeated test runs don't collide.
    display_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    dataset = automl.types.Dataset(
        display_name=display_name,
        text_sentiment_dataset_metadata=automl.types.TextSentimentDatasetMetadata(
            sentiment_max=4),
    )
    created = client.create_dataset(parent, dataset).result()
    dataset_id = created.name.split("/")[-1]

    yield dataset_id