def download_to_directory(self, bento_pb, target_dir: str) -> None:
        """
        Download or move bundle bundle to target directory.

        Args:
            bento_pb: bento bundle protobuf dict
            target_dir (`str`):

        Returns:
            None

        Raises:
            BentoMLException:
                Unrecognised Bento bundle storage type
        """
        if bento_pb.uri.type == BentoUri.S3:
            bento_service_bundle_path = bento_pb.uri.s3_presigned_url
        elif bento_pb.uri.type == BentoUri.GCS:
            bento_service_bundle_path = bento_pb.uri.gcs_presigned_url
        elif bento_pb.uri.type == BentoUri.LOCAL:
            # Download from remote yatai otherwise provide the file path.
            if is_remote_yatai(self.yatai_service):
                bento_service_bundle_path = self._download_bento(
                    bento_pb.name, bento_pb.version)
            else:
                bento_service_bundle_path = bento_pb.uri.uri
        else:
            raise BentoMLException(
                f'Unrecognized Bento bundle storage type {bento_pb.uri.type}')

        safe_retrieve(bento_service_bundle_path, target_dir)
Example #2
def lambda_package(project_dir, aws_region, s3_bucket_name, deployment_prefix):
    prefix_path = os.path.join(deployment_prefix, 'lambda-functions')
    build_dir = os.path.join(project_dir, '.aws-sam', 'build')

    return_code, stdout, stderr = call_sam_command(
        [
            'package',
            '--force-upload',
            '--s3-bucket',
            s3_bucket_name,
            '--s3-prefix',
            prefix_path,
            '--template-file',
            'template.yaml',
            '--output-template-file',
            'packaged.yaml',
            '--region',
            aws_region,
        ],
        project_dir=build_dir,
        region=aws_region,
    )
    if return_code != 0:
        error_message = stderr
        if not error_message:
            error_message = stdout
        raise BentoMLException(
            'Failed to package lambda function. {}'.format(error_message))
    else:
        return stdout
Example #3
def ensure_is_ready_to_deploy_to_cloud_formation(stack_name, region):
    try:
        cf_client = boto3.client('cloudformation', region)
        logger.debug('Checking stack description')
        describe_formation_result = cf_client.describe_stacks(
            StackName=stack_name)
        result_stacks = describe_formation_result.get('Stacks')
        if len(result_stacks):
            logger.debug('Stack "%s" exists', stack_name)
            stack_result = result_stacks[0]
            if stack_result['StackStatus'] in [
                    'ROLLBACK_COMPLETE',
                    'ROLLBACK_FAILED',
                    'ROLLBACK_IN_PROGRESS',
            ]:
                logger.debug(
                    'Stack "%s" is in a "bad" status(%s), deleting the stack '
                    'before deployment',
                    stack_name,
                    stack_result['StackStatus'],
                )
                cf_client.delete_stack(StackName=stack_name)
    except ClientError as e:
        # Parse the error response by hand to detect a "stack does not exist"
        # error, since "AmazonCloudFormationException" is currently not
        # implemented in boto3. Once it is, switch to catching that exception.
        error_response = e.response.get('Error', {})
        error_code = error_response.get('Code')
        error_message = error_response.get('Message', 'Unknown')
        if error_code == 'ValidationError' and 'does not exist' in error_message:
            pass
        else:
            raise BentoMLException(str(e))
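
For reference, a minimal sketch of the error shape the handler above matches. The stack name and message text are made up, but this is how botocore surfaces CloudFormation validation errors:

from botocore.exceptions import ClientError

# Hypothetical error payload for a stack that does not exist.
error_response = {
    'Error': {
        'Code': 'ValidationError',
        'Message': 'Stack with id my-stack does not exist',
    }
}
e = ClientError(error_response, 'DescribeStacks')
assert e.response.get('Error', {}).get('Code') == 'ValidationError'
assert 'does not exist' in e.response['Error']['Message']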
Example #4
File: env.py Project: prcvd/BentoML
    def __init__(
        self,
        name: str = None,
        channels: List[str] = None,
        dependencies: List[str] = None,
        default_env_yaml_file: str = None,
    ):
        self._yaml = YAML()
        self._yaml.default_flow_style = False

        if default_env_yaml_file:
            env_yml_file = Path(default_env_yaml_file)
            if not env_yml_file.is_file():
                raise BentoMLException(
                    f"Can not find conda environment config yaml file at: "
                    f"`{default_env_yaml_file}`")
            self._conda_env = self._yaml.load(env_yml_file)
        else:
            self._conda_env = self._yaml.load(DEFAULT_CONDA_ENV_BASE_YAML)

        if name:
            self.set_name(name)

        if channels:
            self.add_channels(channels)

        if dependencies:
            self.add_conda_dependencies(dependencies)
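
A minimal construction sketch for the snippet above. The enclosing class name is not shown in the example; `CondaEnv` is assumed from BentoML's env.py, and the channel and dependency values are made up:

# Assumes the __init__ above belongs to a class named CondaEnv.
env = CondaEnv(
    name='bentoml-example-env',
    channels=['conda-forge'],
    dependencies=['scikit-learn'],
)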
Example #5
def generate_aws_compatible_string(*items, max_length=63):
    """
    Generate a AWS resource name that is composed from list of string items. This
    function replaces all invalid characters in the given items into '-', and allow user
    to specify the max_length for each part separately by passing the item and its max
    length in a tuple, e.g.:

    >> generate_aws_compatible_string("abc", "def")
    >> 'abc-def'  # concatenate mupltiple parts

    >> generate_aws_compatible_string("abc_def")
    >> 'abc-def'  # replace invalid chars to '-'

    >> generate_aws_compatible_string(("ab", 1), ("bcd", 2), max_length=4)
    >> 'a-bc'  # trim based on max_length of each part
    """
    trimmed_items = [
        item[0][:item[1]] if isinstance(item, tuple) else item for item in items
    ]
    items = [item[0] if isinstance(item, tuple) else item for item in items]

    for i in range(len(trimmed_items)):
        if len('-'.join(items)) <= max_length:
            break
        else:
            items[i] = trimmed_items[i]

    name = '-'.join(items)
    if len(name) > max_length:
        raise BentoMLException(
            'AWS resource name {} exceeds maximum length of {}'.format(
                name, max_length))
    invalid_chars = re.compile("[^a-zA-Z0-9-]|_")
    name = re.sub(invalid_chars, "-", name)
    return name
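
A few checks tracing the behavior documented in the docstring, assuming generate_aws_compatible_string from this example is in scope:

assert generate_aws_compatible_string('abc', 'def') == 'abc-def'
assert generate_aws_compatible_string('abc_def') == 'abc-def'
# Each part is trimmed to its per-part max length until the joined name fits:
assert generate_aws_compatible_string(('ab', 1), ('bcd', 2), max_length=4) == 'a-bc'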
Example #6
def load(
    tag: t.Union[str, Tag],
    model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
) -> "PyFuncModel":
    """
    Load a model from BentoML local modelstore with given name.

    Args:
        tag (:code:`Union[str, Tag]`):
            Tag of a saved model in BentoML local modelstore.
        model_store (:mod:`~bentoml._internal.models.store.ModelStore`, default to :mod:`BentoMLContainer.model_store`):
            BentoML modelstore, provided by DI Container.

    Returns:
        :obj:`mlflow.pyfunc.PyFuncModel`: an instance of `mlflow.pyfunc.PyFuncModel` from BentoML modelstore.

    Examples:

    .. code-block:: python

        import bentoml

        model = bentoml.mlflow.load("mlflow_sklearn_model")

    """  # noqa
    model = model_store.get(tag)
    if model.info.module not in (MODULE_NAME, __name__):
        raise BentoMLException(
            f"Model {tag} was saved with module {model.info.module}, failed loading with {MODULE_NAME}."
        )
    mlflow_folder = model.path_of(model.info.options["mlflow_folder"])
    return mlflow.pyfunc.load_model(mlflow_folder,
                                    suppress_warnings=False)  # type: ignore
Example #7
 def update(self, deployment_pb, previous_deployment):
     try:
         ensure_docker_available_or_raise()
         deployment_spec = deployment_pb.spec
         bento_pb = self.yatai_service.GetBento(
             GetBentoRequest(
                 bento_name=deployment_spec.bento_name,
                 bento_version=deployment_spec.bento_version,
             )
         )
         if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
             raise BentoMLException(
                 'BentoML does not currently support the {} repository'.format(
                     BentoUri.StorageType.Name(bento_pb.bento.uri.type)
                 )
             )
         return self._update(
             deployment_pb, previous_deployment, bento_pb, bento_pb.bento.uri.uri
         )
     except BentoMLException as error:
         deployment_pb.state.state = DeploymentState.ERROR
         deployment_pb.state.error_message = (
             f'Error updating SageMaker deployment: {str(error)}'
         )
         return ApplyDeploymentResponse(
             status=error.status_proto, deployment=deployment_pb
         )
Example #8
def load_bento_service_class(bundle_path):
    """
    Load a BentoService class from saved bundle in given path

    :param bundle_path: A path to Bento files generated from BentoService#save,
        #save_to_dir, or the path to pip installed BentoService directory
    :return: BentoService class
    """
    config = load_saved_bundle_config(bundle_path)
    metadata = config["metadata"]

    # Find and load target module containing BentoService class from given path
    module_file_path = _find_module_file(bundle_path, metadata["service_name"],
                                         metadata["module_file"])

    # Prepend bundle_path to sys.path for loading extra python dependencies
    sys.path.insert(0, bundle_path)
    sys.path.insert(0, os.path.join(bundle_path, metadata["service_name"]))
    # Include zipimport modules
    zipimport_dir = os.path.join(bundle_path, metadata["service_name"],
                                 ZIPIMPORT_DIR)
    if os.path.exists(zipimport_dir):
        for p in os.listdir(zipimport_dir):
            logger.debug('adding %s to sys.path', p)
            sys.path.insert(0, os.path.join(zipimport_dir, p))

    module_name = metadata["module_name"]
    if module_name in sys.modules:
        logger.warning(
            "Module `%s` already loaded, using existing imported module.",
            module_name)
        module = sys.modules[module_name]
    elif sys.version_info >= (3, 5):
        import importlib.util

        spec = importlib.util.spec_from_file_location(module_name,
                                                      module_file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
    elif sys.version_info >= (3, 3):
        from importlib.machinery import SourceFileLoader

        # pylint:disable=deprecated-method
        module = SourceFileLoader(module_name,
                                  module_file_path).load_module(module_name)
        # pylint:enable=deprecated-method
    else:
        raise BentoMLException("BentoML requires Python 3.4 and above")

    # Remove bundle_path from sys.path to avoid import naming conflicts
    sys.path.remove(bundle_path)

    model_service_class = module.__getattribute__(metadata["service_name"])
    # Set _bento_service_bundle_path, where BentoService will load its artifacts
    model_service_class._bento_service_bundle_path = bundle_path
    # Set cls._version, service instance can access it via svc.version
    model_service_class._bento_service_bundle_version = metadata[
        "service_version"]

    return model_service_class
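
A standalone illustration of the Python >= 3.5 import path used above; the module name and file path here are made up:

import importlib.util

spec = importlib.util.spec_from_file_location(
    'iris_classifier', '/tmp/saved_bundle/IrisClassifier/iris_classifier.py')
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)  # executes the file in the new module's namespace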
Example #9
def load_saved_bundle_config(bundle_path):
    try:
        return SavedBundleConfig.load(os.path.join(bundle_path, "bentoml.yml"))
    except FileNotFoundError:
        raise BentoMLException(
            "BentoML can't locate config file 'bentoml.yml'"
            " in saved bundle in path: {}".format(bundle_path))
Example #10
    def handle_aws_lambda_event(self, event, func):
        if event["headers"].get("Content-Type", "").startswith("images/"):
            # decodebytes introduced at python3.1
            try:
                image_data = self.imread(base64.decodebytes(event["body"]),
                                         pilmode=self.pilmode)
            except AttributeError:
                image_data = self.imread(
                    base64.decodestring(event["body"]),  # pylint: disable=W1505
                    pilmode=self.convert_mode,
                )
        else:
            raise BentoMLException(
                "BentoML currently doesn't support Content-Type: {content_type} for "
                "AWS Lambda".format(
                    content_type=event["headers"]["Content-Type"]))

        if self.after_open:
            image_data = self.after_open(image_data)

        image_data = self.fastai_vision.pil2tensor(image_data, np.float32)
        if self.div:
            image_data = image_data.div_(255)
        if self.cls:
            image_data = self.cls(image_data)
        else:
            image_data = self.fastai_vision.Image(image_data)

        result = func(image_data)
        result = get_output_str(result, event["headers"].get("output", "json"))
        return {"statusCode": 200, "body": result}
Example #11
 def version(self):
     try:
         return self.__class__._bento_service_version
     except AttributeError:
         raise BentoMLException(
             "Only BentoService loaded from archive has version attribute"
         )
Example #12
    def _update(self, deployment_pb, current_deployment, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._update(deployment_pb, current_deployment,
                                    bento_pb, local_path)
        updated_deployment_spec = deployment_pb.spec
        updated_lambda_deployment_config = (
            updated_deployment_spec.aws_lambda_operator_config)
        updated_bento_service_metadata = bento_pb.bento.bento_service_metadata
        describe_result = self.describe(deployment_pb)
        if describe_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                describe_result.status)
            raise YataiDeploymentException(
                f'Failed fetching Lambda deployment current status - '
                f'{error_code}:{error_message}')
        latest_deployment_state = json.loads(describe_result.state.info_json)
        if 's3_bucket' in latest_deployment_state:
            lambda_s3_bucket = latest_deployment_state['s3_bucket']
        else:
            raise BentoMLException(
                'S3 Bucket is missing in the AWS Lambda deployment, please make sure '
                'it exists and try again')

        _deploy_lambda_function(
            deployment_pb=deployment_pb,
            bento_service_metadata=updated_bento_service_metadata,
            deployment_spec=updated_deployment_spec,
            lambda_s3_bucket=lambda_s3_bucket,
            lambda_deployment_config=updated_lambda_deployment_config,
            bento_path=bento_path,
        )

        return ApplyDeploymentResponse(deployment=deployment_pb,
                                       status=Status.OK())
Example #13
 def _download_bento(self, bento_name, bento_version):
     with TempDirectory(cleanup=False) as temp_dir:
         try:
             temp_tar_path = os.path.join(temp_dir,
                                          f'{uuid.uuid4().hex[:12]}.tar')
             response_iterator = self.yatai_service.DownloadBento(
                 DownloadBentoRequest(bento_name=bento_name,
                                      bento_version=bento_version),
                 timeout=DEFAULT_GRPC_REQUEST_TIMEOUT,
             )
             with open(temp_tar_path, 'wb+') as file:
                 for response in response_iterator:
                     if response.status.status_code != status_pb2.Status.OK:
                         raise BentoMLException(
                             response.status.error_message)
                     file.write(response.bento_bundle)
                 file.seek(0)
                 temp_bundle_path = os.path.join(
                     temp_dir, f'{bento_name}_{bento_version}')
                 with tarfile.open(fileobj=file, mode='r') as tar:
                     tar.extractall(path=temp_bundle_path)
             return temp_bundle_path
         except grpc.RpcError as e:
             raise BentoMLRpcError(
                 e,
                 f'Failed to download {bento_name}:{bento_version} from '
                 f'the remote yatai server',
             )
Example #14
    def list(
        self,
        bento_name: str = None,
        offset: int = None,
        limit: int = None,
        order_by: str = None,
        ascending_order: bool = None,
        labels: str = None,
    ) -> List["Bento"]:
        """
        List BentoServices that satisfy the specified criteria.

        Args:
            bento_name (`str`):
                BentoService name
            offset (`int`):
                offset of results
            limit (`int`):
                maximum number of returned results
            labels (`str`):
                filter results by label selectors, e.g. 'key=value,key2=value'
            order_by (`str`):
                orders retrieved BentoService by :obj:`created_at` or :obj:`name`
            ascending_order (`bool`):
                direction of results order

        Returns:
            a list of :class:`~bentoml.BentoService` metadata.

        Example::

            from bentoml.yatai.client import get_yatai_client
            yatai_client = get_yatai_client()
            bentos_info_list = yatai_client.repository.list(labels='key=value,key2=value')
        """  # noqa: E501

        # TODO: ignore type checking for this function. This is
        #  due to all given arguments in `ListBentoRequest` are
        #  not optional types. One solution is to make all
        #  `ListBentoRequest` args in `list` positional. This could
        #  introduce different behaviour at different places in the
        #  codebase. Low triage
        list_bento_request = ListBentoRequest(
            bento_name=bento_name,  # type: ignore
            offset=offset,  # type: ignore
            limit=limit,  # type: ignore
            order_by=order_by,  # type: ignore
            ascending_order=ascending_order,  # type: ignore
        )

        if labels is not None:
            generate_gprc_labels_selector(list_bento_request.label_selectors,
                                          labels)

        result = self.yatai_service.ListBento(list_bento_request)
        if result.status.status_code != yatai_proto.status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                result.status)
            raise BentoMLException(f'{error_code}:{error_message}')
        return result.bentos
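
A hedged usage sketch for list(); the service name and paging values are made up:

from bentoml.yatai.client import get_yatai_client

yatai_client = get_yatai_client()
bentos = yatai_client.repository.list(
    bento_name='IrisClassifier',
    limit=10,
    order_by='created_at',
    ascending_order=False,
)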
Example #15
 def __init__(
     self,
     archive_path,
     api_name,
     region=None,
     instance_count=None,
     instance_type=None,
 ):
     if which("docker") is None:
         raise ValueError(
             "docker is not installed, please install docker and then try again"
         )
     super(SagemakerDeployment, self).__init__(archive_path)
     self.region = DEFAULT_REGION if region is None else region
     self.instance_count = (DEFAULT_INSTANCE_COUNT
                            if instance_count is None else instance_count)
      self.instance_type = (DEFAULT_INSTANCE_TYPE
                            if instance_type is None else instance_type)
     apis = self.bento_service.get_service_apis()
     if api_name:
         self.api = next(item for item in apis if item.name == api_name)
     elif len(apis) == 1:
         self.api = apis[0]
     else:
         raise BentoMLException(
             "Please specify api-name, when more than one API is present in the "
             "archive")
     self.sagemaker_client = boto3.client("sagemaker",
                                          region_name=self.region)
     self.model_name = generate_aws_compatible_string(
         "bentoml-" + self.bento_service.name + "-" +
         self.bento_service.version)
     self.endpoint_config_name = generate_aws_compatible_string(
         self.bento_service.name + "-" + self.bento_service.version +
         "-configuration")
Example #16
def _find_module_file(bundle_path, service_name, module_file):
    # Simply join full path when module_file is just a file name,
    # e.g. module_file=="iris_classifier.py"
    module_file_path = os.path.join(bundle_path, service_name, module_file)
    if not os.path.isfile(module_file_path):
        # Try loading without the service_name prefix, for loading from an installed PyPI package
        module_file_path = os.path.join(bundle_path, module_file)

    # When module_file is located in a subdirectory,
    # e.g. module_file=="foo/bar/iris_classifier.py",
    # this needs to handle the path differences between posix and windows platforms:
    if not os.path.isfile(module_file_path):
        if sys.platform == "win32":
            # Try loading, on Windows, a saved bundle created on a posix platform
            module_file_path = os.path.join(bundle_path, service_name,
                                            str(PurePosixPath(module_file)))
            if not os.path.isfile(module_file_path):
                module_file_path = os.path.join(
                    bundle_path, str(PurePosixPath(module_file)))
        else:
            # Try loading, on posix, a saved bundle created on a Windows platform
            module_file_path = os.path.join(
                bundle_path, service_name,
                PureWindowsPath(module_file).as_posix())
            if not os.path.isfile(module_file_path):
                module_file_path = os.path.join(
                    bundle_path,
                    PureWindowsPath(module_file).as_posix())

    if not os.path.isfile(module_file_path):
        raise BentoMLException(
            "Can not locate module_file {} in saved bundle {}".format(
                module_file, bundle_path))

    return module_file_path
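
The cross-platform handling above hinges on pathlib's pure paths; a minimal illustration:

from pathlib import PurePosixPath, PureWindowsPath

# A module_file recorded on Windows, normalized for a posix filesystem:
assert PureWindowsPath('foo\\bar\\iris_classifier.py').as_posix() == 'foo/bar/iris_classifier.py'
# A module_file recorded on posix keeps its forward slashes when stringified:
assert str(PurePosixPath('foo/bar/iris_classifier.py')) == 'foo/bar/iris_classifier.py'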
Example #17
    def add(self, deployment_pb):
        try:
            deployment_spec = deployment_pb.spec
            deployment_spec.aws_lambda_operator_config.region = (
                deployment_spec.aws_lambda_operator_config.region
                or get_default_aws_region()
            )
            if not deployment_spec.aws_lambda_operator_config.region:
                raise InvalidArgument('AWS region is missing')

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                )
            )
            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    'BentoML does not currently support the {} repository'.format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)
                    )
                )

            return self._add(deployment_pb, bento_pb, bento_pb.bento.uri.uri)
        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = f'Error: {str(error)}'
            return ApplyDeploymentResponse(
                status=error.status_proto, deployment=deployment_pb
            )
Example #18
    def apply(self, deployment_pb, yatai_service, prev_deployment=None):
        try:
            ensure_docker_available_or_raise()
            deployment_spec = deployment_pb.spec
            sagemaker_config = deployment_spec.sagemaker_operator_config
            if sagemaker_config is None:
                raise BentoMLDeploymentException('Sagemaker configuration is missing.')

            bento_pb = yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                )
            )
            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    'BentoML does not currently support the {} repository'.format(
                        bento_pb.bento.uri.type
                    )
                )

            return self._apply(
                deployment_pb,
                bento_pb,
                yatai_service,
                bento_pb.bento.uri.uri,
                prev_deployment,
            )

        except BentoMLException as error:
            return ApplyDeploymentResponse(status=exception_to_return_status(error))
Example #19
    def add(self, deployment_pb):
        try:
            deployment_spec = deployment_pb.spec
            sagemaker_config = deployment_spec.sagemaker_operator_config
            sagemaker_config.region = (
                sagemaker_config.region or get_default_aws_region()
            )

            ensure_docker_available_or_raise()
            if sagemaker_config is None:
                raise YataiDeploymentException('Sagemaker configuration is missing.')

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                )
            )
            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    'BentoML does not currently support the {} repository'.format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)
                    )
                )
            return self._add(deployment_pb, bento_pb, bento_pb.bento.uri.uri)

        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = (
                f'Error creating SageMaker deployment: {str(error)}'
            )
            return ApplyDeploymentResponse(
                status=error.status_proto, deployment=deployment_pb
            )
Example #20
 def __init__(self, name, backend='onnxruntime'):
     super(OnnxModelArtifact, self).__init__(name)
     if backend not in SUPPORTED_ONNX_BACKEND:
         raise BentoMLException(
             f'"{backend}" runtime is currently not supported for OnnxModelArtifact'
         )
     self.backend = backend
Example #21
    def update_lambda_deployment(
        self,
        deployment_name,
        namespace=None,
        bento_name=None,
        bento_version=None,
        memory_size=None,
        timeout=None,
        wait=None,
    ):
        get_deployment_result = self.get(namespace=namespace, name=deployment_name)
        if get_deployment_result.status.status_code != status_pb2.Status.OK:
            error_code = status_pb2.Status.Code.Name(
                get_deployment_result.status.status_code
            )
            error_message = get_deployment_result.status.error_message
            raise BentoMLException(
                f'Failed to retrieve current deployment {deployment_name} '
                f'in {namespace}.  {error_code}:{error_message}'
            )
        deployment_pb = get_deployment_result.deployment
        if bento_name:
            deployment_pb.spec.bento_name = bento_name
        if bento_version:
            deployment_pb.spec.bento_version = bento_version
        if memory_size:
            deployment_pb.spec.aws_lambda_operator_config.memory_size = memory_size
        if timeout:
            deployment_pb.spec.aws_lambda_operator_config.timeout = timeout
        logger.debug('Updated configuration for Lambda deployment %s', deployment_name)

        return self.apply(deployment_pb, wait)
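
A hedged usage sketch, assuming this method lives on the yatai client's deployment API as in BentoML; the deployment name, namespace, and sizes are made up:

from bentoml.yatai.client import get_yatai_client

yatai_client = get_yatai_client()
response = yatai_client.deployment.update_lambda_deployment(
    deployment_name='my-lambda-deployment',
    namespace='dev',
    memory_size=1024,
    timeout=60,
)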
Example #22
def check_tensor_spec(
    tensor: "tf_ext.TensorLike",
    tensor_spec: t.Union[str, t.Tuple[str, ...], t.List[str],
                         "tf_ext.UnionTensorSpec"],
    class_name: t.Optional[str] = None,
) -> bool:
    """
    :code:`isinstance` wrapper to check spec for a given tensor.

    Args:
        tensor (:code:`Union[tf.Tensor, tf.EagerTensor, tf.SparseTensor, tf.RaggedTensor]`):
            tensor class to check.
        tensor_spec (:code:`Union[str, Tuple[str,...]]`):
            class used to check with :obj:`tensor`. Follows :obj:`TENSOR_CLASS_NAME`
        class_name (:code:`str`, `optional`, default to :code:`None`):
            Optional class name to pass for correct path of tensor spec. If none specified,
            then :code:`class_name` will be determined via given spec class.

    Returns:
        `bool` indicating whether the given tensor matches the given spec.
    """
    if tensor_spec is None:
        raise BentoMLException("`tensor` should not be None")
    tensor_cls = type(tensor).__name__
    if isinstance(tensor_spec, str):
        return tensor_cls == tensor_spec.split(".")[-1]
    elif isinstance(tensor_spec, (list, tuple, set)):
        return all(check_tensor_spec(tensor, k) for k in tensor_spec)
    else:
        if class_name is None:
            class_name = (str(tensor_spec.__class__).replace("<class '",
                                                             "").replace(
                                                                 "'>", ""))
        return LazyType["tf_ext.TensorSpec"](class_name).isinstance(tensor)
Example #23
def process_docker_api_line(payload):
    """Process the output from the Docker API stream; raise an Exception if there is an error."""
    # Docker sometimes sends several "{}\n" JSON blocks together...
    errors = []
    for segment in payload.decode("utf-8").strip().split("\n"):
        line = segment.strip()
        if line:
            try:
                line_payload = json.loads(line)
            except ValueError as e:
                logger.warning(
                    "Could not decipher payload from Docker API: %s", str(e))
                continue
            if "errorDetail" in line_payload:
                error = line_payload["errorDetail"]
                error_msg = 'Error running docker command: {}: {}'.format(
                    error["code"], error['message'])
                logger.error(error_msg)
                errors.append(error_msg)
            elif "stream" in line_payload:
                logger.info(line_payload['stream'])

    if errors:
        error_msg = ";".join(errors)
        raise BentoMLException(
            "Error running docker command: {}".format(error_msg))
Example #24
def resolve_bundle_path(
    bento: str,
    pip_installed_bundle_path: Optional[str] = None,
    yatai_url: Optional[str] = None,
) -> str:
    from bentoml.exceptions import BentoMLException
    from bentoml.yatai.client import get_yatai_client

    if pip_installed_bundle_path:
        assert (
            bento is None
        ), "pip installed BentoService commands should not have Bento argument"
        return pip_installed_bundle_path

    if os.path.isdir(bento) or is_s3_url(bento) or is_gcs_url(bento):
        # saved_bundle already supports loading from local, s3 and gcs paths
        return bento

    elif ":" in bento:
        # assume the BentoService was passed in the form of a name:version tag
        yatai_client = get_yatai_client(yatai_url)
        bento_pb = yatai_client.repository.get(bento)
        return resolve_bento_bundle_uri(bento_pb)
    else:
        raise BentoMLException(
            f'BentoService "{bento}" not found - either specify the file path of '
            f"the BentoService saved bundle, or the BentoService id in the form of "
            f'"name:version"')
Example #25
@contextmanager  # from contextlib -- required for the `with` usage shown in Example #12
def _resolve_remote_bundle_path(bundle_path):
    if is_s3_url(bundle_path):
        import boto3

        parsed_url = urlparse(bundle_path)
        bucket_name = parsed_url.netloc
        object_name = parsed_url.path.lstrip('/')

        s3 = boto3.client('s3')
        fileobj = io.BytesIO()
        s3.download_fileobj(bucket_name, object_name, fileobj)
        fileobj.seek(0, 0)
    elif _is_http_url(bundle_path):
        import requests

        response = requests.get(bundle_path)
        fileobj = io.BytesIO()
        fileobj.write(response.content)
        fileobj.seek(0, 0)
    else:
        raise BentoMLException(
            f"Saved bundle path: '{bundle_path}' is not supported")

    with tarfile.open(mode="r:gz", fileobj=fileobj) as tar:
        with tempfile.TemporaryDirectory() as tmpdir:
            filename = tar.getmembers()[0].name
            tar.extractall(path=tmpdir)
            yield os.path.join(tmpdir, filename)
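
With the @contextmanager decorator restored above (Example #12's _update calls this via a with-statement), usage looks like the following; the S3 URL is made up:

with _resolve_remote_bundle_path('s3://my-bucket/bentos/IrisClassifier.tar.gz') as local_path:
    print(local_path)  # path to the extracted bundle inside a temporary directory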
Example #26
 def _enable_gpu(self):
     enable_gpu = self.resource_quota.on_gpu
     if enable_gpu and not paddle.is_compiled_with_cuda():  # type: ignore
         raise BentoMLException(
             "`resource_quota.on_gpu=True`, but the installed paddlepaddle is not compiled with CUDA."
             " Make sure to install `paddlepaddle-gpu` and try again.")
     return enable_gpu
Example #27
def lambda_deploy(project_dir, aws_region, stack_name):
    # if the stack name exists and the state is in rollback_complete or
    # other 'bad' state, we will delete the stack first, and then deploy
    # it
    logger.debug('Ensure stack "%s" is ready to deploy', stack_name)
    ensure_is_ready_to_deploy_to_cloud_formation(stack_name, aws_region)
    logger.debug('Stack "%s"is ready to deploy', stack_name)

    template_file = os.path.join(project_dir, '.aws-sam', 'build',
                                 'packaged.yaml')
    return_code, stdout, stderr = call_sam_command(
        [
            'deploy',
            '--stack-name',
            stack_name,
            '--capabilities',
            'CAPABILITY_IAM',
            '--template-file',
            template_file,
            '--region',
            aws_region,
        ],
        project_dir=project_dir,
        region=aws_region,
    )
    if return_code != 0:
        error_message = stderr
        if not error_message:
            error_message = stdout
        raise BentoMLException(
            'Failed to deploy lambda function. {}'.format(error_message))
    else:
        return stdout
Example #28
    def update(self, deployment_pb, previous_deployment):
        try:
            ensure_sam_available_or_raise()
            ensure_docker_available_or_raise()
            deployment_spec = deployment_pb.spec
            ec2_deployment_config = deployment_spec.aws_ec2_operator_config
            ec2_deployment_config.region = (ec2_deployment_config.region
                                            or get_default_aws_region())
            if not ec2_deployment_config.region:
                raise InvalidArgument("AWS region is missing")

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))

            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    "BentoML currently not support {} repository".format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)))

            return self._update(
                deployment_pb,
                previous_deployment,
                bento_pb.bento.uri.uri,
                ec2_deployment_config.region,
            )
        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = f"Error: {str(error)}"
            return ApplyDeploymentResponse(status=error.status_proto,
                                           deployment=deployment_pb)
Example #29
 def requirements_txt_content(self):
     requirements_txt_file = Path(self._requirements_txt_file)
     if not requirements_txt_file.is_file():
         raise BentoMLException(
             f"requirement txt file not found at '{requirements_txt_file}'"
         )
     return requirements_txt_file
Example #30
    def handle_aws_lambda_event(self, event, func):
        try:
            from imageio import imread
        except ImportError:
            raise ImportError(
                "imageio package is required to use ImageHandler")

        if event["headers"].get("Content-Type",
                                None) in ACCEPTED_CONTENT_TYPES:
            # decodebytes introduced at python3.1
            try:
                image = imread(base64.decodebytes(event["body"]),
                               pilmode=self.pilmode)
            except AttributeError:
                image = imread(
                    base64.decodestring(event["body"]),  # pylint: disable=W1505
                    pilmode=self.pilmode,
                )
        else:
            raise BentoMLException(
                "BentoML currently doesn't support Content-Type: {content_type} for "
                "AWS Lambda".format(
                    content_type=event["headers"]["Content-Type"]))

        result = func(image)
        result = get_output_str(result, event["headers"].get("output", "json"))
        return {"statusCode": 200, "body": result}