# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import itertools
from typing import Iterable

import argparse

from bentoml.utils.lazy_loader import LazyLoader
from bentoml.marshal.utils import SimpleResponse, SimpleRequest
from bentoml.adapters.utils import TfTensorJsonEncoder
from bentoml.adapters.base_output import BaseOutputAdapter

np = LazyLoader('np', globals(), 'numpy')


def tf_to_numpy(tensor):
    '''
    Tensor -> ndarray
    List[Tensor] -> tuple[ndarray]
    '''
    import tensorflow as tf

    if isinstance(tensor, (list, tuple)):
        return tuple(tf_to_numpy(t) for t in tensor)

    if tf.__version__.startswith("1."):
        with tf.compat.v1.Session():
            return tensor.numpy()

    # Presumed tail of the original function (cut off in this listing):
    # TF 2.x eager tensors expose .numpy() directly.
    return tensor.numpy()
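
# Usage sketch for tf_to_numpy, not part of the original module; it assumes
# TensorFlow 2.x eager execution, where Tensor.numpy() is available.
if __name__ == "__main__":
    import tensorflow as tf

    single = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    batch = [tf.constant([1, 2, 3]), tf.constant([4, 5, 6])]

    print(type(tf_to_numpy(single)))  # <class 'numpy.ndarray'>
    print(type(tf_to_numpy(batch)))   # <class 'tuple'> of ndarray elements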
Example #2
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import BinaryIO, Iterable, Sequence, Tuple

from bentoml.adapters.file_input import FileInput
from bentoml.adapters.utils import (
    check_file_extension,
    get_default_accept_image_formats,
)
from bentoml.types import InferenceTask
from bentoml.utils.lazy_loader import LazyLoader

# BentoML optional dependencies, using lazy load to avoid ImportError
imageio = LazyLoader('imageio', globals(), 'imageio')
numpy = LazyLoader('numpy', globals(), 'numpy')
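
# The two LazyLoader lines above only record the module names; the real imports
# happen on first attribute access (assuming the usual TensorFlow-style lazy
# loader). A small demo of that behaviour, not part of the original module:
def _lazy_import_demo(values):
    # Touching `numpy.mean` here is what actually triggers `import numpy`.
    return numpy.mean(values)

if __name__ == "__main__":
    print(_lazy_import_demo([1, 2, 3]))  # numpy gets imported here; prints 2.0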

ApiFuncArgs = Tuple[Sequence['numpy.ndarray']]


class ImageInput(FileInput):
    """Convert incoming image data from http request, cli or lambda event into imageio
    array (a subclass of numpy.ndarray that has a meta attribute) and pass down to
    user defined API functions.

    ** To operate raw files or PIL.Image obj, use the low level :class:`.FileInput`. **

    Parameters
    ----------
    accept_image_formats : List[str]
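
# Sketch of how ImageInput is typically wired into a service (BentoML 0.13-era
# API; the service name and the artifact-free predict body are illustrative,
# not part of the original module):
import bentoml
from bentoml.adapters import ImageInput

class ImageShapeService(bentoml.BentoService):
    @bentoml.api(input=ImageInput(), batch=True)
    def predict(self, image_arrays):
        # image_arrays is a Sequence[numpy.ndarray], one entry per request
        return [arr.shape for arr in image_arrays]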
Example #3
# NOTE: the original import header of this excerpt is cut off; the imports
# below are reconstructed from what the visible code uses.
import logging

import click

from bentoml.utils.lazy_loader import LazyLoader
from bentoml.cli.click_utils import (
    BentoMLCommandGroup,
    _echo,
    CLI_COLOR_SUCCESS,
    parse_yaml_file_callback,
    validate_labels_query_callback,
)
from bentoml.yatai.deployment import ALL_NAMESPACE_TAG
from bentoml.utils import status_pb_to_error_code_and_message
from bentoml.exceptions import CLIException
from bentoml.cli.utils import (
    Spinner,
    _print_deployment_info,
    _print_deployments_info,
    get_default_yatai_client,
)

yatai_proto = LazyLoader('yatai_proto', globals(), 'bentoml.yatai.proto')

# pylint: disable=unused-variable

logger = logging.getLogger(__name__)

DEFAULT_SAGEMAKER_INSTANCE_TYPE = 'ml.m4.xlarge'
DEFAULT_SAGEMAKER_INSTANCE_COUNT = 1


def get_deployment_sub_command():
    # pylint: disable=unused-variable

    @click.group(
        help='Commands for managing and operating BentoService deployments',
        cls=BentoMLCommandGroup,
Example #4
import os
import argparse
import base64
from io import BytesIO
from typing import Iterable

from werkzeug.utils import secure_filename
from werkzeug.wrappers import Request

from bentoml import config
from bentoml.utils.lazy_loader import LazyLoader
from bentoml.marshal.utils import SimpleRequest, SimpleResponse
from bentoml.exceptions import BadInput
from bentoml.adapters.base_input import BaseInputAdapter

# BentoML optional dependencies, using lazy load to avoid ImportError
imageio = LazyLoader('imageio', globals(), 'imageio')


def verify_image_format_or_raise(file_name, accept_format_list):
    """
    Raise an error if the file's extension is not in the accept_format_list.
    """
    if accept_format_list:
        _, extension = os.path.splitext(file_name)
        if extension.lower() not in accept_format_list:
            raise BadInput(
                "Input file not in supported format list: {}".format(accept_format_list)
            )
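
# Small demo of the check above, not part of the original module:
if __name__ == "__main__":
    try:
        verify_image_format_or_raise("photo.gif", [".jpg", ".jpeg", ".png"])
    except BadInput as e:
        print(e)  # ".gif" is not in the accepted list, so BadInput is raised
    verify_image_format_or_raise("photo.png", [".jpg", ".jpeg", ".png"])  # passes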


def get_default_accept_image_formats():
Example #5
# NOTE: the original import header of this excerpt is cut off; the imports
# below are reconstructed from what the visible code uses.
import click

from bentoml.utils.lazy_loader import LazyLoader
from bentoml.cli.click_utils import BentoMLCommandGroup
from bentoml.cli.deployment import (
    _print_deployment_info,
    _print_deployments_info,
)
from bentoml.yatai.deployment import ALL_NAMESPACE_TAG
from bentoml.yatai.deployment.aws_ec2.constants import (
    DEFAULT_MIN_SIZE,
    DEFAULT_DESIRED_CAPACITY,
    DEFAULT_MAX_SIZE,
    DEFAULT_INSTANCE_TYPE,
    DEFAULT_AMI_ID,
)
from bentoml.exceptions import CLIException

yatai_proto = LazyLoader("yatai_proto", globals(), "bentoml.yatai.proto")


def get_aws_ec2_sub_command():
    # pylint: disable=unused-variable

    @click.group(name="ec2", cls=BentoMLCommandGroup, help="commands for EC2")
    def aws_ec2():
        pass

    @aws_ec2.command(help="Deploy BentoService to EC2")
    @click.argument("name", type=click.STRING)
    @click.option(
        "-b",
        "--bento",
        type=click.STRING,
Example #6
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
from typing import Iterable, Mapping, Optional, Sequence, Tuple

from bentoml.adapters.string_input import StringInput
from bentoml.exceptions import MissingDependencyException
from bentoml.types import HTTPHeaders, InferenceTask
from bentoml.utils.dataframe_util import (
    PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS,
    read_dataframes_from_json_n_csv,
)
from bentoml.utils.lazy_loader import LazyLoader

pandas = LazyLoader('pandas', globals(), 'pandas')

DataFrameTask = InferenceTask[str]
ApiFuncArgs = Tuple['pandas.DataFrame']


class DataframeInput(StringInput):
    """
    Convert various inputs (HTTP, AWS Lambda or CLI) to a pandas DataFrame and
    pass it to the API functions.

    Parameters
    ----------
    orient : str
        Indication of expected JSON string format.
        Compatible JSON strings can be produced by ``to_json()`` with a
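
# Sketch of how DataframeInput is typically wired into a service (BentoML
# 0.13-era API; the service name and predict body are illustrative, not part
# of the original module):
import bentoml
from bentoml.adapters import DataframeInput

class RowSumService(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(orient="records"), batch=True)
    def predict(self, df):
        # `df` is a pandas.DataFrame assembled from the request payload
        return df.sum(axis=1).tolist()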
Example #7
from io import BytesIO
import json

from werkzeug.utils import secure_filename
from flask import Response

from bentoml.utils.lazy_loader import LazyLoader
from bentoml.utils.dataframe_util import PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS
from bentoml.exceptions import BadInput
from bentoml.adapters.base_input import BaseInputAdapter
from bentoml.adapters.image_input import (
    verify_image_format_or_raise,
    get_default_accept_image_formats,
)

np = LazyLoader('np', globals(), 'numpy')

# BentoML optional dependencies, using lazy load to avoid ImportError
pd = LazyLoader('pd', globals(), 'pandas')
fastai = LazyLoader('fastai', globals(), 'fastai')
imageio = LazyLoader('imageio', globals(), 'imageio')


class NumpyJsonEncoder(json.JSONEncoder):
    """ Special json encoder for numpy types """
    def default(self, o):  # pylint: disable=method-hidden
        if isinstance(o, np.generic):
            return o.item()

        if isinstance(o, np.ndarray):
            return o.tolist()

        # Presumed tail of the original method (cut off in this listing):
        # fall back to the default encoder for any other type.
        return json.JSONEncoder.default(self, o)
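
# Usage sketch for the encoder above, not part of the original module:
if __name__ == "__main__":
    import numpy
    payload = {"score": numpy.float32(0.87), "probs": numpy.array([0.1, 0.9])}
    print(json.dumps(payload, cls=NumpyJsonEncoder))
    # e.g. {"score": 0.8700000047683716, "probs": [0.1, 0.9]}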
Example #8
# See the License for the specific language governing permissions and
# limitations under the License.

import traceback
from typing import BinaryIO, Sequence, Tuple

from bentoml.adapters.legacy_image_input import LegacyImageInput
from bentoml.adapters.utils import (
    check_file_extension,
    get_default_accept_image_formats,
)
from bentoml.types import InferenceTask
from bentoml.utils.lazy_loader import LazyLoader

# BentoML optional dependencies, using lazy load to avoid ImportError
fastai = LazyLoader('fastai', globals(), 'fastai')
imageio = LazyLoader('imageio', globals(), 'imageio')
numpy = LazyLoader('numpy', globals(), 'numpy')

MultiImgTask = InferenceTask[Tuple[BinaryIO, ...]]  # image file bytes, json bytes
ApiFuncArgs = Tuple[Sequence['numpy.ndarray'], ...]


class FastaiImageInput(LegacyImageInput):
    """InputAdapter specified for handling image input following fastai conventions
    by passing type fastai.vision.Image to user API function and providing options
    such as div, cls, and after_open

    Args:
        input_names ([str]]): A tuple of acceptable input name for HTTP request.
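
# Heavily hedged sketch of how this adapter is typically used in a service
# (BentoML 0.13-era API; the service/method names and the fastai Image
# attribute accessed below are illustrative and may differ):
import bentoml
from bentoml.adapters import FastaiImageInput

class PetClassifierService(bentoml.BentoService):
    @bentoml.api(input=FastaiImageInput())
    def predict(self, image):
        # `image` arrives as a fastai.vision.Image built from the uploaded file
        return str(image.shape)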