Example no. 1
 def testDocumentSuccess(self):
     """documented() records the doc text in EXTRA_DOCS, keyed by id(obj)."""
     key = tfx_doc_controls.documented('test key', 'test value')
     # Exactly one entry should have been registered by the call above.
     self.assertEqual(1, len(tfx_doc_controls.EXTRA_DOCS))
     # The registry maps id(returned object) -> the doc text that was passed.
     self.assertEqual('test value',
                      tfx_doc_controls.EXTRA_DOCS.get(id(key)))
Example no. 2
from __future__ import division
from __future__ import print_function

from typing import Any, Dict, List, Text

import absl
from tfx import types
from tfx.components.trainer import executor as tfx_trainer_executor
from tfx.dsl.components.base import base_executor
from tfx.extensions.google_cloud_ai_platform import runner
from tfx.types import standard_component_specs
from tfx.utils import doc_controls
from tfx.utils import json_utils

# Keys to the items in Trainer's custom_config. Each key is registered via
# doc_controls.documented so it appears in the generated public API docs.
TRAINING_ARGS_KEY = doc_controls.documented(
    obj='ai_platform_training_args',
    doc='Keys to the items in custom_config of Trainer for passing '
    'training_args to AI Platform.')

JOB_ID_KEY = doc_controls.documented(
    obj='ai_platform_training_job_id',
    doc='Keys to the items in custom_config of Trainer for specifying job id.')

ENABLE_UCAIP_KEY = doc_controls.documented(
    obj='ai_platform_training_enable_ucaip',
    doc='Keys to the items in custom_config of Trainer for enabling uCAIP '
    'Training.')

# Fixed grammar in the public doc string: "for specify" -> "for specifying".
UCAIP_REGION_KEY = doc_controls.documented(
    obj='ai_platform_training_ucaip_region',
    doc='Keys to the items in custom_config of Trainer for specifying the '
    'region of uCAIP.')
Example no. 3
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud AI Platform constants module."""
from tfx.utils import doc_controls

# custom_config key that switches Trainer and Pusher over to Vertex AI.
ENABLE_VERTEX_KEY = doc_controls.documented(
    doc='Keys to the items in custom_config of Trainer and Pusher for'
    ' enabling Vertex AI.',
    obj='ai_platform_enable_vertex')

# custom_config key naming the Vertex AI region to run in.
VERTEX_REGION_KEY = doc_controls.documented(
    doc='Keys to the items in custom_config of Trainer and Pusher'
    ' for specifying the region of Vertex AI.',
    obj='ai_platform_vertex_region')

# custom_config key for the serving container image. Prediction container
# registry: https://gcr.io/cloud-aiplatform/prediction.
VERTEX_CONTAINER_IMAGE_URI_KEY = doc_controls.documented(
    doc='Keys to the items in custom_config of Pusher/BulkInferrer for the'
    ' serving container image URI in Vertex AI.',
    obj='ai_platform_vertex_container_image_uri')

# Keys to the items in custom_config passed as a part of exec_properties.
SERVING_ARGS_KEY = doc_controls.documented(
Example no. 4
from absl import logging
from tfx import types
from tfx.components.tuner import executor as tuner_executor
from tfx.dsl.components.base import base_executor
from tfx.extensions.google_cloud_ai_platform import constants
from tfx.extensions.google_cloud_ai_platform import runner
from tfx.extensions.google_cloud_ai_platform.trainer import executor as ai_platform_trainer_executor
from tfx.types import standard_component_specs
from tfx.utils import doc_controls
from tfx.utils import json_utils

# custom_config key for the Tuner's AI Platform / Vertex AI job spec.
# Fixed missing spaces in the concatenated doc string so the rendered text
# does not run the URLs into the surrounding sentences.
TUNING_ARGS_KEY = doc_controls.documented(
    obj='ai_platform_tuning_args',
    doc='Keys to the items in custom_config of Tuner for passing '
    'training_job to AI Platform, and the GCP project under which '
    'the training job will be executed. In Vertex AI, this corresponds to '
    'a CustomJob as defined in: '
    'https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.customJobs#CustomJob. '
    'In CAIP, this corresponds to TrainingInputs as defined in: '
    'https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput'
)

# custom_config key for where remote trials persist their state.
REMOTE_TRIALS_WORKING_DIR_KEY = doc_controls.documented(
    obj='remote_trials_working_dir',
    doc='Keys to the items in custom_config of Tuner for specifying a working '
    'dir for remote trial.')

# Directory to store intermediate hyperparameter search progress.
# TODO(b/160188053): Use the same temp dir as the calling Executor.
_WORKING_DIRECTORY = '/tmp'

Example no. 5
from tfx import types
from tfx.components.pusher import executor as tfx_pusher_executor
from tfx.types import artifact_utils
from tfx.types import standard_component_specs
from tfx.utils import doc_controls
from tfx.utils import io_utils
from tfx.utils import json_utils
from tfx.utils import telemetry_utils

# Seconds between successive status checks — presumably used by a polling
# loop further down in this file; confirm against the caller.
_POLLING_INTERVAL_IN_SECONDS = 30

# URI scheme prefix identifying Google Cloud Storage paths.
_GCS_PREFIX = 'gs://'

# Keys to the items in custom_config passed as a part of exec_properties.
SERVING_ARGS_KEY = doc_controls.documented(
    obj='bigquery_serving_args',
    doc='Keys to the items in custom_config of Pusher for passing serving args '
    'to Big Query.')

# BigQueryML serving argument keys
_PROJECT_ID_KEY = 'project_id'
_BQ_DATASET_ID_KEY = 'bq_dataset_id'
_MODEL_NAME_KEY = 'model_name'

# Project where query will be executed
_COMPUTE_PROJECT_ID_KEY = 'compute_project_id'

# Keys for custom_config.
_CUSTOM_CONFIG_KEY = 'custom_config'

# Model name should be enclosed within backticks.
# model_path should end with an asterisk glob (/*).
Example no. 6
from tensorflow.python.saved_model import loader_impl  # pylint:disable=g-direct-tensorflow-import
# TODO(b/140306674): Stop using the internal TF API.

# Matches 'projects/<p>/models/<m>/versions/<v>' resource paths; the second
# pattern matches the model-only form (no explicit version).
_CLOUD_PUSH_DESTINATION_RE = re.compile(
    r'^projects\/([^\/]+)\/models\/([^\/]+)\/versions\/([^\/]+)$')
_CLOUD_PUSH_DESTINATION_RE_DEFAULT_VERSION = re.compile(
    r'^projects\/([^\/]+)\/models\/([^\/]+)$')

# We define the following aliases of Any because the actual types are not
# public.
_SignatureDef = Any

# Keys to the items in custom_config passed as a part of exec_properties.
# Fixed missing space in the concatenated doc string ("bulkinferrer" ->
# "bulk inferrer").
SERVING_ARGS_KEY = doc_controls.documented(
    obj='ai_platform_serving_args',
    doc='Keys to the items in custom_config of Bulk Inferrer for passing bulk '
    'inferrer args to AI Platform.')
# Keys for custom_config.
_CUSTOM_CONFIG_KEY = 'custom_config'


class Executor(bulk_inferrer_executor.Executor):
    """Bulk inferer executor for inference on AI Platform."""
    def Do(self, input_dict: Dict[str, List[types.Artifact]],
           output_dict: Dict[str, List[types.Artifact]],
           exec_properties: Dict[str, Any]) -> None:
        """Runs batch inference on a given model with given input examples.

    This function creates a new model (if necessary) and a new model version
    before inference, and cleans up resources after inference. It provides
    re-executability as it cleans up (only) the model resources that are created
Example no. 7
import absl
from tfx import types
from tfx.components.trainer import executor as tfx_trainer_executor
from tfx.dsl.components.base import base_executor
from tfx.extensions.google_cloud_ai_platform import runner
from tfx.extensions.google_cloud_ai_platform.constants import ENABLE_VERTEX_KEY
from tfx.extensions.google_cloud_ai_platform.constants import VERTEX_REGION_KEY
from tfx.types import standard_component_specs
from tfx.utils import doc_controls
from tfx.utils import json_utils

# custom_config key for the Trainer's AI Platform / Vertex AI job spec.
# Fixed missing spaces in the concatenated doc string so the rendered text
# does not run the URLs into the surrounding sentences.
TRAINING_ARGS_KEY = doc_controls.documented(
    obj='ai_platform_training_args',
    doc='Keys to the items in custom_config of Trainer for passing '
    'training_job to AI Platform, and the GCP project under which '
    'the training job will be executed. In Vertex AI, this corresponds to '
    'a CustomJob as defined in: '
    'https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.customJobs#CustomJob. '
    'In CAIP, this corresponds to TrainingInputs as defined in: '
    'https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput'
)

# custom_config key for pinning a specific AI Platform training job id.
JOB_ID_KEY = doc_controls.documented(
    obj='ai_platform_training_job_id',
    doc='Keys to the items in custom_config of Trainer for specifying job id.')

# Deprecated toggle, kept for backward compatibility; dropped the stray
# trailing space from the public doc string.
ENABLE_UCAIP_KEY = doc_controls.documented(
    obj='ai_platform_training_enable_ucaip',
    doc='Deprecated. Please use ENABLE_VERTEX_KEY instead. Keys to the items in'
    ' custom_config of Trainer for enabling uCAIP Training.')

UCAIP_REGION_KEY = doc_controls.documented(
Example no. 8
import multiprocessing
import os
from typing import Any, Dict, List, Text

from absl import logging
from tfx import types
from tfx.components.tuner import executor as tuner_executor
from tfx.dsl.components.base import base_executor
from tfx.extensions.google_cloud_ai_platform import runner
from tfx.extensions.google_cloud_ai_platform.trainer import executor as ai_platform_trainer_executor
from tfx.types import standard_component_specs
from tfx.utils import doc_controls
from tfx.utils import json_utils

# custom_config key for passing tuning args through to AI Platform; registered
# via doc_controls.documented so it appears in the generated public API docs.
TUNING_ARGS_KEY = doc_controls.documented(
    obj='ai_platform_tuning_args',
    doc='Keys to the items in custom_config of Tuner for passing tuning args '
    'to AI Platform.')

# custom_config key for where remote trials persist their state.
REMOTE_TRIALS_WORKING_DIR_KEY = doc_controls.documented(
    obj='remote_trials_working_dir',
    doc='Keys to the items in custom_config of Tuner for specifying a working '
    'dir for remote trial.')

# Directory to store intermediate hyperparameter search progress.
# TODO(b/160188053): Use the same temp dir as the calling Executor.
_WORKING_DIRECTORY = '/tmp'


class Executor(base_executor.BaseExecutor):
    """Tuner executor that launches parallel tuning flock on Cloud AI Platform.
Example no. 9
from tfx.extensions.google_cloud_ai_platform import runner
from tfx.types import artifact_utils
from tfx.types import standard_component_specs
from tfx.utils import doc_controls
from tfx.utils import io_utils
from tfx.utils import json_utils
from tfx.utils import telemetry_utils

# Template for the resource name of a model version deployed on Google Cloud
# AI Platform.
# https://cloud.google.com/ai-platform/prediction/docs/reference/rest/v1/projects.models.versions/get
_CAIP_MODEL_VERSION_PATH_FORMAT = (
    'projects/{project_id}/models/{model}/versions/{version}')

# Keys to the items in custom_config passed as a part of exec_properties.
SERVING_ARGS_KEY = doc_controls.documented(
    doc='Keys to the items in custom_config of Pusher for passing serving args'
    ' to AI Platform.',
    obj='ai_platform_serving_args')

# Optional custom_config key that overrides the default service endpoint.
ENDPOINT_ARGS_KEY = doc_controls.documented(
    doc='Keys to the items in custom_config of Pusher for optional'
    ' endpoint override.',
    obj='endpoint')

# Keys for custom_config.
_CUSTOM_CONFIG_KEY = 'custom_config'


class Executor(tfx_pusher_executor.Executor):
    """Deploy a model to Google Cloud AI Platform serving."""
    def Do(self, input_dict: Dict[Text, List[types.Artifact]],
           output_dict: Dict[Text, List[types.Artifact]],