    def test_export_data_exception(self):
        # Setup Response
        error = status_pb2.Status()
        operation = operations_pb2.Operation(
            name='operations/test_export_data_exception', done=True)
        operation.error.CopyFrom(error)

        # Mock the API response
        channel = ChannelStub(responses=[operation])
        client = automl_v1beta1.AutoMlClient(channel=channel)

        # Setup Request
        name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')
        output_config = {}

        response = client.export_data(name, output_config)
        exception = response.exception()
        assert exception.errors[0] == error
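Outside the test harness, a caller hits the same failure through the LRO's result() call, which raises instead of returning when the operation finished with an error status. A minimal sketch, assuming a configured client and real resource names (the helper name is hypothetical):

from google.api_core.exceptions import GoogleAPICallError

def export_data_checked(client, dataset_name, output_config):
    """Start an export and block until it finishes, surfacing API errors."""
    response = client.export_data(dataset_name, output_config)
    try:
        # result() raises if the operation completed with an error status.
        return response.result()
    except GoogleAPICallError as err:
        print("Export failed: {}".format(err))
        raise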
Example #2
def test_model_create_status_delete(capsys):
    # create model
    client = automl.AutoMlClient()
    model_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    project_location = client.location_path(project_id, compute_region)
    my_model = {
        "display_name": model_name,
        "dataset_id": "3946265060617537378",
        "image_classification_model_metadata": {
            "train_budget": 24
        },
    }
    response = client.create_model(project_location, my_model)
    operation_name = response.operation.name
    assert operation_name

    # cancel operation
    response.cancel()
def create_model(project_id,
                 compute_region,
                 dataset_id,
                 model_name,
                 user_folder,
                 train_budget=1):
    """Create a model."""
    # [START automl_vision_create_model]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # dataset_id = 'DATASET_ID_HERE'
    # model_name = 'MODEL_NAME_HERE'
    # train_budget = integer amount for maximum cost of model

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # Set model name and model metadata for the image dataset.
    my_model = {
        "display_name": model_name,
        "dataset_id": dataset_id,
        "image_classification_model_metadata": {
            "train_budget": train_budget
        } if train_budget else {},
    }

    # Create a model with the model metadata in the region.
    response = client.create_model(project_location, my_model)

    print("Training operation name: {}".format(response.operation.name))
    print("Training started...")

    # Persist the operation name so training progress can be checked later.
    operation_id_file = user_folder + '/operation_' + project_id + '.txt'
    with open(operation_id_file, 'w') as f:
        f.write(response.operation.name)
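The stored operation name can later be polled to check training progress. A minimal sketch, assuming the operations client is reachable via _transport.operations_client as in the cancel sample at the end of this page (the helper name is hypothetical):

def get_operation_status(operation_id_file):
    """Read a stored operation name and report whether training finished."""
    from google.cloud import automl_v1beta1 as automl

    with open(operation_id_file) as f:
        operation_name = f.read().strip()

    client = automl.AutoMlClient()
    # How the LRO client is exposed varies across library versions; this
    # mirrors the _transport.operations_client pattern used further below.
    op = client._transport.operations_client.get_operation(operation_name)
    print("Operation status: {}".format("done" if op.done else "running"))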
def list_models(project_id, compute_region, filter_):
    """List all models."""
    # [START automl_vision_list_models]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # filter_ = 'DATASET_ID_HERE'

    from google.cloud import automl_v1beta1 as automl
    from google.cloud.automl_v1beta1 import enums

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # List all the models available in the region by applying filter.
    response = client.list_models(project_location, filter_)

    print("List of models:")
    for model in response:
        # Retrieve deployment state.
        if model.deployment_state == enums.Model.DeploymentState.DEPLOYED:
            deployment_state = "deployed"
        else:
            deployment_state = "undeployed"

        # Display the model information.
        print("Model name: {}".format(model.name))
        print("Model id: {}".format(model.name.split("/")[-1]))
        print("Model display name: {}".format(model.display_name))
        print("Image classification model metadata:")
        print("Training budget: {}".format(
            model.image_classification_model_metadata.train_budget))
        print("Training cost: {}".format(
            model.image_classification_model_metadata.train_cost))
        print("Stop reason: {}".format(
            model.image_classification_model_metadata.stop_reason))
        print("Base model id: {}".format(
            model.image_classification_model_metadata.base_model_id))
        print("Model create time:")
        print("\tseconds: {}".format(model.create_time.seconds))
        print("\tnanos: {}".format(model.create_time.nanos))
        print("Model deployment state: {}".format(deployment_state))
Example #5
def list_model_evaluations(project_id, compute_region, model_id, filter_):
    """List model evaluations."""
    # [START automl_translate_list_model_evaluations]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # model_id = 'MODEL_ID_HERE'
    # filter_ = 'filter expression here'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = client.model_path(project_id, compute_region, model_id)

    print("List of model evaluations:")
    for element in client.list_model_evaluations(model_full_id, filter_):
        print(element)
def create_dataset(project_id, compute_region, dataset_name, multilabel=False):
    """Create a dataset."""
    # [START automl_language_create_dataset]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # dataset_name = 'DATASET_NAME_HERE'
    # multilabel = True for multilabel or False for multiclass

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # Classification type is assigned based on multilabel value.
    classification_type = "MULTICLASS"
    if multilabel:
        classification_type = "MULTILABEL"

    # Specify the text classification type for the dataset.
    dataset_metadata = {"classification_type": classification_type}

    # Set dataset name and metadata.
    my_dataset = {
        "display_name": dataset_name,
        "text_classification_dataset_metadata": dataset_metadata,
    }

    # Create a dataset with the dataset metadata in the region.
    dataset = client.create_dataset(project_location, my_dataset)

    # Display the dataset information.
    print("Dataset name: {}".format(dataset.name))
    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
    print("Dataset display name: {}".format(dataset.display_name))
    print("Text classification dataset metadata:")
    print("\t{}".format(dataset.text_classification_dataset_metadata))
    print("Dataset example count: {}".format(dataset.example_count))
    print("Dataset create time:")
    print("\tseconds: {}".format(dataset.create_time.seconds))
    print("\tnanos: {}".format(dataset.create_time.nanos))
Example #7
def undeploy_model(project_id, compute_region, model_id):
    """
    Undeploy model.
    # project_id: the 'Project ID showed in GCP Console
    # compute_region: only 'us-central1' works now
    # model_id: 'table id+today's day'
    """

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = client.model_path(project_id, compute_region, model_id)

    # Undeploy the model.
    response = client.undeploy_model(model_full_id)

    # synchronous check of operation status.
    print("Model undeployed. {}".format(response.result()))
Example #8
def import_dataset(
    project_id="YOUR_PROJECT_ID",
    dataset_id="YOUR_DATASET_ID",
    path="gs://YOUR_BUCKET_ID/path/to/data.csv",
):
    """Import a dataset."""
    client = automl.AutoMlClient()
    # Get the full path of the dataset.
    dataset_full_id = client.dataset_path(project_id, "us-central1",
                                          dataset_id)
    # Get the multiple Google Cloud Storage URIs
    input_uris = path.split(",")
    gcs_source = automl.types.GcsSource(input_uris=input_uris)
    input_config = automl.types.InputConfig(gcs_source=gcs_source)
    # Import data from the input URI
    response = client.import_data(dataset_full_id, input_config)

    print("Processing import...")
    print("Data imported. {}".format(response.result()))
Example #9
def predict(image):
    """Classify a base64-encoded image using a deployed AutoML Vision model."""
    from base64 import b64decode

    from google.cloud import automl_v1beta1 as automl

    project_id = 'qualiscan-216706'
    compute_region = 'us-central1'
    model_id = 'ICN2956512205565128229'
    score_threshold = '0.5'

    automl_client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = automl_client.model_path(
        project_id, compute_region, model_id
    )

    # Create client for prediction service.
    prediction_client = automl.PredictionServiceClient()
    # Decode the base64-encoded image and assign to payload.
    content = b64decode(image)
    payload = {"image": {"image_bytes": content}}

    # params is additional domain-specific parameters.
    # score_threshold is used to filter the result
    # Initialize params
    params = {}
    if score_threshold:
        params = {"score_threshold": score_threshold}

    response = prediction_client.predict(model_full_id, payload, params)
    print("Prediction results:")
    results = []
    for result in response.payload:
        print("Predicted class name: {}".format(result.display_name))
        print("Predicted class score: {}".format(result.classification.score))
        results.append({"display_name": result.display_name,
                        "classification_score": result.classification.score})
    return results
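The function expects a base64-encoded image, so a caller reading from disk would encode first. A hypothetical invocation (the file name is a placeholder):

from base64 import b64encode

with open("sample.jpg", "rb") as f:
    encoded = b64encode(f.read())
print(predict(encoded))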
Example #10
    def __init__(self, ctx):
        super(GoogleA2ML, self).__init__()
        self.ctx = ctx
        self.client = automl.AutoMlClient()
        self.name = ctx.config['config'].get('name', None)
        self.project_id = ctx.config['google'].get('project', None)
        self.compute_region = ctx.config['google'].get('cluster/region', 'us-central1')
        self.metric = ctx.config['google'].get('experiment/metric', "MINIMIZE_MAE")
        self.project_location = self.client.location_path(self.project_id, self.compute_region)
        self.dataset_id = ctx.config['google'].get('dataset_id', None)
        # The full dataset resource path supersedes any configured display name.
        self.dataset_name = self.client.dataset_path(self.project_id, self.compute_region, self.dataset_id)
        self.source = ctx.config['config'].get('source', None)
        self.target = ctx.config['config'].get('target', None)
        self.exclude = ctx.config['config'].get('exclude', None)
        self.budget = ctx.config['config'].get('budget', None)
        self.operation_name = ctx.config['google'].get('operation_name', None)
        self.model_name = ctx.config['google'].get('model_name', None)
        self.gsbucket = ctx.config['google'].get('gsbucket', 'gs://a2ml')
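The constructor only reads nested keys through ctx.config[...].get(...), so a dict-backed stand-in is enough for local experimentation. A minimal sketch; every key and value here is an illustrative assumption, not the real a2ml config schema:

class StubContext:
    """Dict-backed stand-in exposing the same config lookups."""
    def __init__(self):
        self.config = {
            'config': {'name': 'demo', 'source': 'gs://my-bucket/data.csv',
                       'target': 'label', 'exclude': None, 'budget': 1000},
            'google': {'project': 'my-project-id',
                       'dataset_id': 'TBL1234567890123456789'},
        }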
Example #11
def create_dataset(project_id="YOUR_PROJECT_ID",
                   display_name="your_datasets_display_name"):
    """Create a automl video object tracking dataset."""
    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = f"projects/{project_id}/locations/us-central1"
    metadata = automl.VideoObjectTrackingDatasetMetadata()
    dataset = automl.Dataset(
        display_name=display_name,
        video_object_tracking_dataset_metadata=metadata,
    )

    # Create a dataset with the dataset metadata in the region.
    created_dataset = client.create_dataset(parent=project_location,
                                            dataset=dataset)
    # Display the dataset information
    print("Dataset name: {}".format(created_dataset.name))
    print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
Example #12
def create_dataset(project_id, compute_region, dataset_name, source, target):
    """Create a dataset."""
    # [START automl_translation_create_dataset]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # dataset_name = 'DATASET_NAME_HERE'
    # source = 'LANGUAGE_CODE_OF_SOURCE_LANGUAGE'
    # target = 'LANGUAGE_CODE_OF_TARGET_LANGUAGE'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # Specify the source and target language.
    dataset_metadata = {
        "source_language_code": source,
        "target_language_code": target,
    }
    # Set dataset name and dataset metadata
    my_dataset = {
        "display_name": dataset_name,
        "translation_dataset_metadata": dataset_metadata,
    }

    # Create a dataset with the dataset metadata in the region.
    dataset = client.create_dataset(project_location, my_dataset)

    # Display the dataset information
    print("Dataset name: {}".format(dataset.name))
    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
    print("Dataset display name: {}".format(dataset.display_name))
    print("Translation dataset Metadata:")
    print("\tsource_language_code: {}".format(
        dataset.translation_dataset_metadata.source_language_code))
    print("\ttarget_language_code: {}".format(
        dataset.translation_dataset_metadata.target_language_code))
    print("Dataset create time:")
    print("\tseconds: {}".format(dataset.create_time.seconds))
    print("\tnanos: {}".format(dataset.create_time.nanos))
def predict(project_id,
            compute_region,
            model_id,
            file_path,
            score_threshold=""):
    """Make a prediction for an image."""
    # [START automl_vision_predict]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # model_id = 'MODEL_ID_HERE'
    # file_path = '/local/path/to/file'
    # score_threshold = 'value from 0.0 to 1.0'

    from google.cloud import automl_v1beta1 as automl

    automl_client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = automl_client.model_path(project_id, compute_region,
                                             model_id)

    # Create client for prediction service.
    prediction_client = automl.PredictionServiceClient()

    # Read the image and assign to payload.
    with open(file_path, "rb") as image_file:
        content = image_file.read()
    payload = {"image": {"image_bytes": content}}

    # params is additional domain-specific parameters.
    # score_threshold is used to filter the result
    # Initialize params
    params = {}
    if score_threshold:
        params = {"score_threshold": score_threshold}

    response = prediction_client.predict(model_full_id, payload, params)
    print("Prediction results:")
    for result in response.payload:
        print("Predicted class name: {}".format(result.display_name))
        print("Predicted class score: {}".format(result.classification.score))
Example #14
def list_models(project_id, compute_region, filter_=None):
    """
    List all models.
    # project_id: the 'Project ID showed in GCP Console
    # compute_region: only 'us-central1' works now
    """

    from google.cloud import automl_v1beta1 as automl
    from google.cloud.automl_v1beta1 import enums

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # List all the models available in the region by applying filter.
    response = client.list_models(project_location, filter_)

    print("List of models:")
    for model in response:
        # Retrieve deployment state.
        if model.deployment_state == enums.Model.DeploymentState.DEPLOYED:
            deployment_state = "deployed"
        else:
            deployment_state = "undeployed"

        # Display the model information.
        print("Model name: {}".format(model.name))
        print("Model id: {}".format(model.name.split("/")[-1]))
        print("Model display name: {}".format(model.display_name))
        metadata = model.tables_model_metadata
        print("Target column display name: {}".format(
            metadata.target_column_spec.display_name))
        print("Training budget in node milli hours: {}".format(
            metadata.train_budget_milli_node_hours))
        print("Training cost in node milli hours: {}".format(
            metadata.train_cost_milli_node_hours))
        print("Model create time:")
        print("\tseconds: {}".format(model.create_time.seconds))
        print("\tnanos: {}".format(model.create_time.nanos))
        print("Model deployment state: {}".format(deployment_state))
        print("\n")
def get_model(project_id, compute_region, model_id):
    """Get model details."""
    # [START automl_vision_get_model]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # model_id = 'MODEL_ID_HERE'

    from google.cloud import automl_v1beta1 as automl
    from google.cloud.automl_v1beta1 import enums

    client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = client.model_path(project_id, compute_region, model_id)

    # Get complete detail of the model.
    model = client.get_model(model_full_id)

    # Retrieve deployment state.
    if model.deployment_state == enums.Model.DeploymentState.DEPLOYED:
        deployment_state = "deployed"
    else:
        deployment_state = "undeployed"

    # Display the model information.
    print("Model name: {}".format(model.name))
    print("Model id: {}".format(model.name.split("/")[-1]))
    print("Model display name: {}".format(model.display_name))
    print("Image classification model metadata:")
    print("Training budget: {}".format(
        model.image_classification_model_metadata.train_budget))
    print("Training cost: {}".format(
        model.image_classification_model_metadata.train_cost))
    print("Stop reason: {}".format(
        model.image_classification_model_metadata.stop_reason))
    print("Base model id: {}".format(
        model.image_classification_model_metadata.base_model_id))
    print("Model create time:")
    print("\tseconds: {}".format(model.create_time.seconds))
    print("\tnanos: {}".format(model.create_time.nanos))
    print("Model deployment state: {}".format(deployment_state))
Example #16
def update_column_spec(project_id,
                       compute_region,
                       dataset_id,
                       table_spec_id,
                       column_spec_id,
                       type_code,
                       nullable=None):
    """
    Update column spec.
    # project_id: the 'Project ID showed in GCP Console
    # compute_region: only 'us-central1' works now
    # dataset_id: start with 'TBL', can retrieve the info by execute list_datasets
    # table_spec_id: a long format numeric id, can retrieve by executing list_table_specs
    # column_spec_id: a long format numeric id, can retrieve by executing get_table_specs
    # type_code: Numeric or Category
    """

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the column spec.
    column_spec_full_id = client.column_spec_path(project_id, compute_region,
                                                  dataset_id, table_spec_id,
                                                  column_spec_id)

    # Set type code and nullable in data_type.
    data_type = {'type_code': type_code}
    if nullable is not None:
        data_type['nullable'] = nullable

    # Set the updated data_type in the column_spec.
    my_column_spec = {
        'name': column_spec_full_id,
        'data_type': data_type,
    }

    # Update the column spec.
    response = client.update_column_spec(my_column_spec)

    # The updated column spec is returned directly (no long-running operation).
    print("Column spec updated. {}".format(response))
Example #17
    def test_undeploy_model_exception(self):
        # Setup Response
        error = status_pb2.Status()
        operation = operations_pb2.Operation(
            name="operations/test_undeploy_model_exception", done=True)
        operation.error.CopyFrom(error)

        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = automl_v1beta1.AutoMlClient()

        # Setup Request
        name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]")

        response = client.undeploy_model(name)
        exception = response.exception()
        assert exception.errors[0] == error
def delete_model(project_id, compute_region, model_id):
    """Delete a model."""
    # [START automl_translate_delete_model]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # model_id = 'MODEL_ID_HERE'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = client.model_path(project_id, compute_region, model_id)

    # Delete a model.
    response = client.delete_model(name=model_full_id)

    # synchronous check of operation status.
    print("Model deleted. {}".format(response.result()))
def delete_model(project_id, compute_region, model_id):
    """Deletes a model."""
    # [START automl_vision_iod_delete_model]
    # TODO(developer): Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # model_id = '[MODEL_ID]'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = client.model_path(project_id, compute_region, model_id)

    # Delete a model.
    response = client.delete_model(model_full_id)

    # synchronous check of operation status.
    print('Model deleted. {}'.format(response.result()))
def sample_create_dataset():
    """Create Dataset"""

    client = automl_v1beta1.AutoMlClient()

    parent = client.location_path('hzyi-186423', 'us-central1')
    display_name = 'my_dataset'
    source_language_code = 'en-GB'
    target_language_code = 'fr-CA'
    translation_dataset_metadata = {
        'source_language_code': source_language_code,
        'target_language_code': target_language_code
    }
    dataset = {
        'display_name': display_name,
        'translation_dataset_metadata': translation_dataset_metadata
    }

    response = client.create_dataset(parent, dataset)
    print(u'Dataset name: {}'.format(response.name))
Example #21
    def test_delete_dataset_exception(self):
        # Setup Response
        error = status_pb2.Status()
        operation = operations_pb2.Operation(
            name='operations/test_delete_dataset_exception', done=True)
        operation.error.CopyFrom(error)

        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch('google.api_core.grpc_helpers.create_channel')
        with patch as create_channel:
            create_channel.return_value = channel
            client = automl_v1beta1.AutoMlClient()

        # Setup Request
        name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]')

        response = client.delete_dataset(name)
        exception = response.exception()
        assert exception.errors[0] == error
Example #22
def display_evaluation(project_id, compute_region, model_id, filter_=None):
    """
    Display evaluation.
    # project_id: the 'Project ID showed in GCP Console
    # compute_region: only 'us-central1' works now
    # model_id: 'table id+today's day'
    """

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the model.
    model_full_id = client.model_path(project_id, compute_region, model_id)

    # List all the model evaluations in the model by applying filter.
    response = client.list_model_evaluations(model_full_id, filter_)

    # Iterate through the results.
    for evaluation in response:
        # There is an evaluation for each class in the model and one for the
        # overall model; keep only the overall model evaluation.
        if not evaluation.annotation_spec_id:
            model_evaluation_id = evaluation.name.split("/")[-1]

    # Resource name for the model evaluation.
    model_evaluation_full_id = client.model_evaluation_path(
        project_id, compute_region, model_id, model_evaluation_id)

    # Get a model evaluation.
    model_evaluation = client.get_model_evaluation(model_evaluation_full_id)

    regression_metrics = model_evaluation.regression_evaluation_metrics
    if str(regression_metrics):
        print("Model regression metrics:")
        print("Model RMSE: {}".format(
            regression_metrics.root_mean_squared_error))
        print("Model MAE: {}".format(regression_metrics.mean_absolute_error))
        print("Model MAPE: {}".format(
            regression_metrics.mean_absolute_percentage_error))
        print("Model R^2: {}".format(regression_metrics.r_squared))
Example #23
    def test_export_data_exception(self):
        # Setup Response
        error = status_pb2.Status()
        operation = operations_pb2.Operation(
            name="operations/test_export_data_exception", done=True)
        operation.error.CopyFrom(error)

        # Mock the API response
        channel = ChannelStub(responses=[operation])
        patch = mock.patch("google.api_core.grpc_helpers.create_channel")
        with patch as create_channel:
            create_channel.return_value = channel
            client = automl_v1beta1.AutoMlClient()

        # Setup Request
        name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]")
        output_config = {}

        response = client.export_data(name, output_config)
        exception = response.exception()
        assert exception.errors[0] == error
def create_dataset(project_id, compute_region, dataset_metadata, path):
    """Create dataset and import data."""

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    parent = client.location_path(project_id, compute_region)

    # Create a dataset with the dataset metadata in the region.
    dataset = client.create_dataset(parent, dataset_metadata)

    # Import data from the input URI.
    response = client.import_data(dataset.name, {
        "gcs_source": {
            "input_uris": [path]
        }
    })

    print("Processing import...")

    print(f"Data imported. {response.result()}")
Example #25
def test_model_create_status_delete(capsys):
    # create model
    client = automl.AutoMlClient()
    model_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    project_location = client.location_path(project_id, compute_region)
    my_model = {
        "display_name": model_name,
        "dataset_id": "3876092572857648864",
        "translation_model_metadata": {"base_model": ""},
    }
    response = client.create_model(project_location, my_model)
    operation_name = response.operation.name
    assert operation_name

    # get operation status
    automl_translation_model.get_operation_status(operation_name)
    out, _ = capsys.readouterr()
    assert "Operation status: " in out

    # cancel operation
    response.cancel()
def delete_dataset(project_id, compute_region, dataset_id):
    """Delete a dataset"""
    # [START automl_vision_delete_dataset]
    # TODO(developer): Uncomment and set the following variables
    # project_id = 'PROJECT_ID_HERE'
    # compute_region = 'COMPUTE_REGION_HERE'
    # dataset_id = 'DATASET_ID_HERE'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the dataset.
    dataset_full_id = client.dataset_path(project_id, compute_region,
                                          dataset_id)

    # Delete a dataset.
    response = client.delete_dataset(dataset_full_id)

    # synchronous check of operation status.
    print("Dataset deleted. {}".format(response.result()))
Example #27
def delete_dataset(project_id, compute_region, dataset_id):
    """
    Delete a dataset.
    # project_id: the 'Project ID showed in GCP Console
    # compute_region: only 'us-central1' works now
    # dataset_id: start with 'TBL', can retrieve the info by execute list_datasets
    """

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the dataset.
    dataset_full_id = client.dataset_path(project_id, compute_region,
                                          dataset_id)

    # Delete a dataset.
    response = client.delete_dataset(dataset_full_id)

    # synchronous check of operation status.
    print("Dataset deleted. {}".format(response.result()))
Example #28
def test_model_create_status_delete(capsys):
    # create model
    client = automl.AutoMlClient()
    model_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    project_location = client.location_path(project_id, compute_region)
    my_model = {
        "display_name": model_name,
        "dataset_id": "2551826603472450019",
        "text_classification_model_metadata": {},
    }
    response = client.create_model(project_location, my_model)
    operation_name = response.operation.name
    assert operation_name

    # get operation status
    automl_natural_language_model.get_operation_status(operation_name)
    out, _ = capsys.readouterr()
    assert "Operation status: " in out

    # cancel operation
    response.cancel()
    def test_undeploy_model(self):
        # Setup Expected Response
        name_2 = 'name2-1052831874'
        done = True
        expected_response = {'name': name_2, 'done': done}
        expected_response = operations_pb2.Operation(**expected_response)

        # Mock the API response
        channel = ChannelStub(responses=[expected_response])
        client = automl_v1beta1.AutoMlClient(channel=channel)

        # Setup Request
        name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')

        response = client.undeploy_model(name)
        assert expected_response == response

        assert len(channel.requests) == 1
        expected_request = service_pb2.UndeployModelRequest(name=name)
        actual_request = channel.requests[0][1]
        assert expected_request == actual_request
Example #30
def sample_cancel_operation(project, operation_id):
    """
    Cancel Long-Running Operation

    Args:
      project Required. Your Google Cloud Project ID.
      operation_id Required. The ID of the Operation.
    """

    client = automl_v1beta1.AutoMlClient()

    operations_client = client._transport.operations_client

    # project = '[Google Cloud Project ID]'
    # operation_id = '[Operation ID]'
    name = "projects/{}/locations/us-central1/operations/{}".format(
        project, operation_id)

    operations_client.cancel_operation(name)

    print(u"Cancelled operation: {}".format(name))