Example #1
    def testRoundTrip(self):
        self.assertEqual(
            tfs_flavor.parse_model_path(
                tfs_flavor.make_model_path('/foo/bar', 'my-model', 123)),
            ('/foo/bar', 'my-model', 123))

        self.assertEqual(
            tfs_flavor.make_model_path(
                *tfs_flavor.parse_model_path('/foo/bar/my-model/123')),
            '/foo/bar/my-model/123')
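
For reference, the flavor exercised here is just the three components joined as path segments. A minimal sketch, assuming `tfs_flavor` is the same TF Serving path-flavor helper imported by the test above:

    # The flavor is simply <model_base_path>/<model_name>/<version>.
    model_base_path, model_name, version = tfs_flavor.parse_model_path(
        '/foo/bar/my-model/123')
    assert (model_base_path, model_name, version) == ('/foo/bar', 'my-model', 123)
    assert tfs_flavor.make_model_path(model_base_path, model_name, version) == (
        '/foo/bar/my-model/123')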
Example #2
    def _PrepareModelPath(
            self, model_uri: Text,
            serving_spec: infra_validator_pb2.ServingSpec) -> Text:
        model_path = path_utils.serving_model_path(model_uri)
        serving_binary = serving_spec.WhichOneof('serving_binary')
        if serving_binary == 'tensorflow_serving':
            # TensorFlow Serving requires the model to be stored in its own
            # directory structure flavor. If the current model_path does not
            # conform to the flavor, we need to make a copy to a temporary path.
            try:
                # Check whether the current model_path conforms to the TensorFlow
                # Serving model path flavor (i.e. it parses without raising).
                tf_serving_flavor.parse_model_path(
                    model_path, expected_model_name=serving_spec.model_name)
            except ValueError:
                # Copy the model so that it complies with the TensorFlow Serving
                # model path flavor.
                temp_model_path = tf_serving_flavor.make_model_path(
                    model_base_path=self._get_tmp_dir(),
                    model_name=serving_spec.model_name,
                    version=int(time.time()))
                io_utils.copy_dir(src=model_path, dst=temp_model_path)
                return temp_model_path

        return model_path
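
The try/except above is effectively a conformance check: `parse_model_path` raises `ValueError` when the path does not follow the `<model_base_path>/<model_name>/<version>` flavor (or the model name does not match). A minimal sketch of that check factored into a standalone helper, assuming the same `tf_serving_flavor` module; the helper name is hypothetical:

    def _conforms_to_tfs_flavor(model_path, model_name):
        # Hypothetical helper, not part of the source above: returns True when
        # model_path already follows the TF Serving flavor for `model_name`.
        try:
            tf_serving_flavor.parse_model_path(
                model_path, expected_model_name=model_name)
            return True
        except ValueError:
            return False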
Example #3
    def _PrepareModelPath(
            self, model: types.Artifact,
            serving_spec: infra_validator_pb2.ServingSpec) -> str:
        model_path = path_utils.serving_model_path(
            model.uri, path_utils.is_old_model_artifact(model))
        serving_binary = serving_spec.WhichOneof('serving_binary')
        if serving_binary == _TENSORFLOW_SERVING:
            # TensorFlow Serving requires the model to be stored in its own
            # directory structure flavor. If the current model_path does not
            # conform to the flavor, we need to make a copy to a temporary path.
            try:
                # Check whether the current model_path conforms to the TensorFlow
                # Serving model path flavor (i.e. it parses without raising).
                tf_serving_flavor.parse_model_path(
                    model_path, expected_model_name=serving_spec.model_name)
            except ValueError:
                # Copy the model so that it complies with the TensorFlow Serving
                # model path flavor.
                temp_model_path = tf_serving_flavor.make_model_path(
                    model_base_path=self._get_tmp_dir(),
                    model_name=serving_spec.model_name,
                    version=int(time.time()))
                io_utils.copy_dir(src=model_path, dst=temp_model_path)
                self._AddCleanup(io_utils.delete_dir,
                                 self._context.get_tmp_path())
                return temp_model_path

        return model_path
Example #4
    def testMakeModelPath(self):
        self.assertEqual(
            tfs_flavor.make_model_path(model_base_path='/foo/bar',
                                       model_name='my-model',
                                       version=123), '/foo/bar/my-model/123')

        self.assertEqual(
            tfs_flavor.make_model_path(
                model_base_path='s3://bucket-name/foo/bar',
                model_name='my-model',
                version=123), 's3://bucket-name/foo/bar/my-model/123')

        self.assertEqual(
            tfs_flavor.make_model_path(
                model_base_path='gs://bucket-name/foo/bar',
                model_name='my-model',
                version=123), 'gs://bucket-name/foo/bar/my-model/123')
Example #5
    def MakeDockerRunParams(self, host_port: int, model_path: Text,
                            needs_mount: bool) -> Dict[Text, Any]:
        """Make parameters for docker `client.containers.run`.

    Args:
      host_port: Available port in the host to bind with container port.
      model_path: A path to the model.
      needs_mount: If True, model_path will be mounted to the container.

    Returns:
      A dictionary of docker run parameters.
    """
        result = dict(self._BASE_DOCKER_RUN_PARAMS,
                      image=self._image,
                      ports={'{}/tcp'.format(self.container_port): host_port})

        if needs_mount:
            # model_path should be a local directory. In order to make TF Serving
            # see the host model path, we need to mount the model path as a volume
            # into the container.
            assert os.path.isdir(model_path), '{} does not exist'.format(
                model_path)
            container_model_path = tf_serving_flavor.make_model_path(
                model_base_path=self._DEFAULT_MODEL_BASE_PATH,
                model_name=self._model_name,
                version=1)
            result.update(environment=self.MakeEnvVars(),
                          mounts=[
                              docker_types.Mount(type='bind',
                                                 target=container_model_path,
                                                 source=model_path,
                                                 read_only=True)
                          ])
        else:
            # model_path is presumably a remote URI. TF Serving is able to pick up
            # the model from the remote location directly using gfile, so all we
            # need to do is set the environment variables correctly.
            result.update(environment=self.MakeEnvVars(model_path=model_path))

        return result
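
Per the docstring, the returned dictionary is meant to be passed straight into docker-py's `client.containers.run`. A rough usage sketch; `serving_binary` is a hypothetical instance of the class whose method is shown above, and the port and path values are made up:

    import docker

    client = docker.from_env()
    # `serving_binary` is assumed to expose the MakeDockerRunParams shown above.
    run_params = serving_binary.MakeDockerRunParams(
        host_port=8500, model_path='/tmp/exported-model', needs_mount=True)
    container = client.containers.run(**run_params)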