Example #1
def test_multiprocess_debug_mode():
    """get_debug_mode() and set_debug_mode() should preserve between processes"""

    set_debug_mode(True)
    assert get_debug_mode()

    process = multiprocessing.Process(
        target=assert_debug_mode, args=[True], daemon=True
    )
    process.start()
    process.join()

    assert process.exitcode == 0

    set_debug_mode(False)
    assert not get_debug_mode()

    process = multiprocessing.Process(
        target=assert_debug_mode, args=[False], daemon=True
    )
    process.start()
    process.join()

    assert process.exitcode == 0

    del os.environ["BENTOML_DEBUG"]
Example #2
def test_set_debug_mode():
    set_debug_mode(True)
    assert get_debug_mode()

    set_debug_mode(False)
    assert not get_debug_mode()

    del os.environ["BENTOML_DEBUG"]
Example #3
def configure_logging(logging_level=None):
    base_log_dir = os.path.expanduser(config("logging").get("BASE_LOG_DIR"))
    Path(base_log_dir).mkdir(parents=True, exist_ok=True)
    if os.path.exists(config("logging").get("logging_config")):
        logging_config_path = config("logging").get("logging_config")
        with open(logging_config_path, "rb") as f:
            logging_config = YAML().load(f.read())
        logging.config.dictConfig(logging_config)
        logging.getLogger(__name__).debug(
            "Loaded logging configuration from %s." % logging_config_path)
    else:
        if logging_level is None:
            logging_level = config("logging").get("LEVEL").upper()
            if "LOGGING_LEVEL" in config("logging"):
                # Support legacy config name e.g. BENTOML__LOGGING__LOGGING_LEVEL=debug
                logging_level = config("logging").get("LOGGING_LEVEL").upper()

        if get_debug_mode():
            logging_level = logging.getLevelName(logging.DEBUG)

        logging_config = get_logging_config_dict(logging_level, base_log_dir)
        logging.config.dictConfig(logging_config)
        logging.getLogger(__name__).debug(
            "Loaded logging configuration from default configuration " +
            "and environment variables.")
Example #4
def configure_logging(
    logging_level: str = Provide[BentoMLContainer.config.logging.level],
    base_log_dir: str = Provide[BentoMLContainer.logging_file_directory],
    console_logging_enabled: bool = Provide[
        BentoMLContainer.config.logging.console.enabled],
    file_logging_enabled: bool = Provide[
        BentoMLContainer.config.logging.file.enabled],
    advanced_enabled: bool = Provide[
        BentoMLContainer.config.logging.advanced.enabled],
    advanced_config: dict = Provide[
        BentoMLContainer.config.logging.advanced.config],
):
    Path(base_log_dir).mkdir(parents=True, exist_ok=True)
    if advanced_enabled:
        logging.config.dictConfig(advanced_config)
        logging.getLogger(__name__).debug(
            "Configured logging with advanced configuration, config=%s",
            advanced_config)
    else:
        if get_debug_mode():
            logging_level = logging.getLevelName(logging.DEBUG)

        logging_config = get_logging_config_dict(logging_level, base_log_dir,
                                                 console_logging_enabled,
                                                 file_logging_enabled)
        logging.config.dictConfig(logging_config)
        logging.getLogger(__name__).debug(
            "Configured logging with simple configuration, "
            "level=%s, directory=%s, console_enabled=%s, file_enabled=%s",
            logging_level,
            base_log_dir,
            console_logging_enabled,
            file_logging_enabled,
        )
Example #5
    def __exit__(self, exc_type, exc_val, exc_tb):
        if get_debug_mode():
            logger.debug('BentoML in debug mode, keeping temp directory "%s"',
                         self.path)
            return

        if self._cleanup:
            self.cleanup()
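For context, this __exit__ hook belongs to a temp-directory helper that keeps its directory around when debug mode is on and otherwise removes it. The snippet below is only a minimal sketch of such a context manager, assuming a hypothetical TempDirectory class and constructor; the import path for get_debug_mode may also differ between BentoML versions.

import logging
import shutil
import tempfile

# Assumed import path; it varies across BentoML versions.
from bentoml.configuration import get_debug_mode

logger = logging.getLogger(__name__)


class TempDirectory:
    # Hypothetical wrapper reproducing the __exit__ behavior shown above.

    def __init__(self, cleanup=True):
        self._cleanup = cleanup
        self.path = None

    def __enter__(self):
        self.path = tempfile.mkdtemp(prefix="bentoml-tmp-")
        return self.path

    def cleanup(self):
        if self.path:
            shutil.rmtree(self.path, ignore_errors=True)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if get_debug_mode():
            logger.debug('BentoML in debug mode, keeping temp directory "%s"',
                         self.path)
            return

        if self._cleanup:
            self.cleanup()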
Example #6
 def start(self):
     """
     Start a REST server at the specified port on the instance or parameter.
     """
     # The BentoML API service is not thread safe.
     # The Flask dev server enables threading by default, so disable it.
     self.app.run(
         port=self.port, threaded=False, debug=get_debug_mode(), use_reloader=False,
     )
Example #7
def configure_logging(logging_level=None):
    if logging_level is None:
        logging_level = config("logging").get("LOGGING_LEVEL").upper()
    if get_debug_mode():
        logging_level = logging.getLevelName(logging.DEBUG)

    base_log_dir = os.path.expanduser(config("logging").get("BASE_LOG_DIR"))
    Path(base_log_dir).mkdir(parents=True, exist_ok=True)
    logging_config = get_logging_config_dict(logging_level, base_log_dir)
    logging.config.dictConfig(logging_config)
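get_logging_config_dict itself does not appear in these examples. The sketch below only illustrates the general shape of the dictionary such a helper could return for logging.config.dictConfig; the handler names, format string, log file name, and rotation settings are illustrative assumptions rather than BentoML's actual configuration.

import os


def get_logging_config_dict(logging_level, base_log_dir):
    # Illustrative sketch only: a standard logging.config.dictConfig mapping
    # with one console handler and one rotating file handler.
    return {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {"format": "[%(asctime)s] %(levelname)s - %(message)s"},
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "console",
                "level": logging_level,
            },
            "file": {
                "class": "logging.handlers.RotatingFileHandler",
                "formatter": "console",
                "level": logging_level,
                "filename": os.path.join(base_log_dir, "active.log"),
                "maxBytes": 100 * 1000 * 1000,
                "backupCount": 2,
            },
        },
        "root": {"level": logging_level, "handlers": ["console", "file"]},
    }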
Example #8
 def run(
     self,
     port: int = Provide[BentoMLContainer.forward_port],
     host: str = Provide[BentoMLContainer.forward_host],
 ):
     """
     Start a REST server at the specified port on the instance or parameter.
     """
     # The BentoML API service is not thread safe.
     # The Flask dev server enables threading by default, so disable it.
     logger.info("Starting BentoML API server in development mode..")
     self.app.run(
         host=host,
         port=port,
         threaded=False,
         debug=get_debug_mode(),
         use_reloader=False,
     )
Example #9
def test_get_debug_mode():
    os.environ["BENTOML_DEBUG"] = "TRUE"
    assert get_debug_mode()

    os.environ["BENTOML_DEBUG"] = "true"
    assert get_debug_mode()

    os.environ["BENTOML_DEBUG"] = "True"
    assert get_debug_mode()

    os.environ["BENTOML_DEBUG"] = "FALSE"
    assert not get_debug_mode()

    os.environ["BENTOML_DEBUG"] = "false"
    assert not get_debug_mode()

    os.environ["BENTOML_DEBUG"] = "False"
    assert not get_debug_mode()

    del os.environ["BENTOML_DEBUG"]
    assert not get_debug_mode()
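The test above pins down the contract for these helpers: the flag lives in the BENTOML_DEBUG environment variable and is compared case-insensitively against "true", with an unset variable meaning debug is off. A minimal sketch consistent with these tests (an assumption, not BentoML's actual implementation) could look like this:

import os

DEBUG_ENV_VAR = "BENTOML_DEBUG"  # variable name taken from the tests above


def set_debug_mode(enabled: bool) -> None:
    # Storing the flag in the environment lets child processes inherit it,
    # which is what test_multiprocess_debug_mode relies on.
    os.environ[DEBUG_ENV_VAR] = str(enabled)


def get_debug_mode() -> bool:
    # Unset, or any value other than a case-insensitive "true", means off.
    return os.environ.get(DEBUG_ENV_VAR, "").lower() == "true"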
Example #10
def assert_debug_mode(enabled: bool):
    if get_debug_mode() is enabled:
        sys.exit(0)
    else:
        sys.exit(1)
Example #11
def start_yatai_service_grpc_server(
    db_url,
    grpc_port,
    ui_port,
    with_ui,
    base_url,
    repository_type,
    file_system_directory,
    s3_url,
    s3_endpoint_url,
    gcs_url,
    web_ui_log_path: str = Provide[BentoMLContainer.yatai_logging_path],
):
    # Lazily import grpcio for YataiService gRPC-related actions
    import grpc
    from bentoml.yatai.db import DB
    from bentoml.yatai.repository import create_repository
    from bentoml.yatai.yatai_service_impl import get_yatai_service_impl
    from bentoml.yatai.proto.yatai_service_pb2_grpc import add_YataiServicer_to_server
    from bentoml.yatai.proto.yatai_service_pb2_grpc import YataiServicer

    YataiServicerImpl = get_yatai_service_impl(YataiServicer)
    yatai_service = YataiServicerImpl(
        repository=create_repository(repository_type, file_system_directory,
                                     s3_url, s3_endpoint_url, gcs_url),
        database=DB(db_url),
    )

    # Define interceptors here
    grpc_interceptors = [PromServerInterceptor(), ServiceLatencyInterceptor()]
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=10),
        interceptors=grpc_interceptors,
    )
    add_YataiServicer_to_server(yatai_service, server)
    debug_mode = get_debug_mode()
    if debug_mode:
        try:
            logger.debug("Enabling gRPC server reflection for debugging")
            from bentoml.yatai.proto import yatai_service_pb2
            from grpc_reflection.v1alpha import reflection

            SERVICE_NAMES = (
                yatai_service_pb2.DESCRIPTOR.services_by_name["Yatai"].
                full_name,
                reflection.SERVICE_NAME,
            )
            reflection.enable_server_reflection(SERVICE_NAMES, server)
        except ImportError:
            logger.debug(
                "Failed to enable gRPC server reflection, missing required package: "
                '"pip install grpcio-reflection"')
    server.add_insecure_port(f"[::]:{grpc_port}")

    # NOTE: the current implementation sets prometheus_port to
    # 50052 to accommodate the Makefile setup. There is currently
    # no way to call reserve_free_port dynamically from the
    # Makefile to find a free port for prometheus_port without
    # the help of a shell script.
    prometheus_port = 50052
    with reserve_free_port() as port:
        prometheus_port = port
    # prevents wsgi from seeing prometheus_port as used
    start_http_server(prometheus_port)
    server.start()
    if with_ui:
        ensure_node_available_or_raise()
        yatai_grpc_server_address = f"localhost:{grpc_port}"
        prometheus_address = f"http://localhost:{prometheus_port}"
        async_start_yatai_service_web_ui(
            yatai_grpc_server_address,
            prometheus_address,
            ui_port,
            web_ui_log_path,
            debug_mode,
            base_url,
        )

    # We don't import _echo function from click_utils because of circular dep
    if with_ui:
        if debug_mode is True:
            ui_port = 8080
        web_ui_link = f"http://127.0.0.1:{ui_port}"
        if base_url != ".":
            web_ui_link += f"/{base_url}"
        web_ui_message = f"running on {web_ui_link}"
    else:
        web_ui_message = "off"
    if debug_mode:
        prom_ui_message = "off"
    else:
        prom_ui_message = f"running on http://127.0.0.1:{ui_port}/metrics\n"

    click.echo(
        f"* Starting BentoML YataiService gRPC Server\n"
        f'* Debug mode: { "on" if debug_mode else "off"}\n'
        f"* Web UI: {web_ui_message}\n"
        f"* Running on 127.0.0.1:{grpc_port} (Press CTRL+C to quit)\n"
        f"* Prometheus: {prom_ui_message}\n"
        f"* Help and instructions: "
        f"https://docs.bentoml.org/en/latest/guides/yatai_service.html\n"
        f'{f"* Web server log can be found here: {web_ui_log_path}" if with_ui else ""}'
        f"\n-----\n"
        f"* Usage in Python:\n"
        f'*  bento_svc.save(yatai_url="127.0.0.1:{grpc_port}")\n'
        f"*  from bentoml.yatai.client import get_yatai_client\n"
        f'*  get_yatai_client("127.0.0.1:{grpc_port}").repository.list()\n'
        f"* Usage in CLI:\n"
        f"*  bentoml list --yatai-url=127.0.0.1:{grpc_port}\n"
        f"*  bentoml containerize IrisClassifier:latest --yatai-url=127.0.0.1:"
        f"{grpc_port}\n"
        f"*  bentoml push IrisClassifier:20200918001645_CD2886 --yatai-url=127.0.0.1:"
        f"{grpc_port}\n"
        f"*  bentoml pull IrisClassifier:20200918001645_CD2886 --yatai-url=127.0.0.1:"
        f"{grpc_port}\n"
        f"*  bentoml retrieve IrisClassifier:20200918001645_CD2886 "
        f'--yatai-url=127.0.0.1:{grpc_port} --target_dir="/tmp/foo/bar"\n'
        f"*  bentoml delete IrisClassifier:20200918001645_CD2886 "
        f"--yatai-url=127.0.0.1:{grpc_port}\n"
        # TODO: simplify the example usage here once related documentation is ready
    )

    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        logger.info("Terminating YataiService gRPC server..")
        server.stop(grace=None)
Example #12
def start_yatai_service_grpc_server(
    db_url, repo_base_url, grpc_port, ui_port, with_ui, s3_endpoint_url
):
    from bentoml.yatai.yatai_service_impl import YataiService

    yatai_service = YataiService(
        db_url=db_url, repo_base_url=repo_base_url, s3_endpoint_url=s3_endpoint_url,
    )
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_YataiServicer_to_server(yatai_service, server)
    debug_mode = get_debug_mode()
    if debug_mode:
        try:
            logger.debug('Enabling gRPC server reflection for debugging')
            from grpc_reflection.v1alpha import reflection
            from bentoml.yatai.proto import yatai_service_pb2

            SERVICE_NAMES = (
                yatai_service_pb2.DESCRIPTOR.services_by_name['Yatai'].full_name,
                reflection.SERVICE_NAME,
            )
            reflection.enable_server_reflection(SERVICE_NAMES, server)
        except ImportError:
            logger.debug(
                'Failed to enable gRPC server reflection, missing required package: '
                '"pip install grpcio-reflection"'
            )
    server.add_insecure_port(f'[::]:{grpc_port}')
    server.start()
    if with_ui:
        web_ui_log_path = os.path.join(
            config("logging").get("BASE_LOG_DIR"),
            config('logging').get("yatai_web_server_log_filename"),
        )

        ensure_node_available_or_raise()
        yatai_grpc_server_address = f'localhost:{grpc_port}'
        async_start_yatai_service_web_ui(
            yatai_grpc_server_address, ui_port, web_ui_log_path, debug_mode
        )

    # We don't import _echo function from click_utils because of circular dep
    click.echo(
        f'* Starting BentoML YataiService gRPC Server\n'
        f'* Debug mode: { "on" if debug_mode else "off"}\n'
        f'* Web UI: {f"running on http://127.0.0.1:{ui_port}" if with_ui else "off"}\n'
        f'* Running on 127.0.0.1:{grpc_port} (Press CTRL+C to quit)\n'
        f'* Usage:\n'
        f'*  Set config: `bentoml config set yatai_service.url=127.0.0.1:{grpc_port}`\n'
        f'*  Set env var: `export BENTOML__YATAI_SERVICE__URL=127.0.0.1:{grpc_port}`\n'
        f'* Help and instructions: '
        f'https://docs.bentoml.org/en/latest/guides/yatai_service.html\n'
        f'{f"* Web server log can be found here: {web_ui_log_path}" if with_ui else ""}'
    )

    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        logger.info("Terminating YataiService gRPC server..")
        server.stop(grace=None)
Example #13
def start_yatai_service_grpc_server(
    db_url, repo_base_url, grpc_port, ui_port, with_ui, s3_endpoint_url, base_url
):
    # Lazily import grpcio for YataiService gRPC-related actions
    import grpc
    from bentoml.yatai.yatai_service_impl import get_yatai_service_impl
    from bentoml.yatai.proto.yatai_service_pb2_grpc import add_YataiServicer_to_server
    from bentoml.yatai.proto.yatai_service_pb2_grpc import YataiServicer

    YataiServicerImpl = get_yatai_service_impl(YataiServicer)
    yatai_service = YataiServicerImpl(
        db_url=db_url, repo_base_url=repo_base_url, s3_endpoint_url=s3_endpoint_url,
    )
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_YataiServicer_to_server(yatai_service, server)
    debug_mode = get_debug_mode()
    if debug_mode:
        try:
            logger.debug('Enabling gRPC server reflection for debugging')
            from grpc_reflection.v1alpha import reflection
            from bentoml.yatai.proto import yatai_service_pb2

            SERVICE_NAMES = (
                yatai_service_pb2.DESCRIPTOR.services_by_name['Yatai'].full_name,
                reflection.SERVICE_NAME,
            )
            reflection.enable_server_reflection(SERVICE_NAMES, server)
        except ImportError:
            logger.debug(
                'Failed to enable gRPC server reflection, missing required package: '
                '"pip install grpcio-reflection"'
            )
    server.add_insecure_port(f'[::]:{grpc_port}')
    server.start()
    if with_ui:
        web_ui_log_path = os.path.join(
            config("logging").get("BASE_LOG_DIR"),
            config('logging').get("yatai_web_server_log_filename"),
        )

        ensure_node_available_or_raise()
        yatai_grpc_server_address = f'localhost:{grpc_port}'
        async_start_yatai_service_web_ui(
            yatai_grpc_server_address, ui_port, web_ui_log_path, debug_mode, base_url
        )

    # We don't import _echo function from click_utils because of circular dep
    if with_ui:
        if debug_mode is True:
            ui_port = 8080
        web_ui_link = f'http://127.0.0.1:{ui_port}'
        if base_url != '.':
            web_ui_link += f'/{base_url}'
        web_ui_message = f'running on {web_ui_link}'
    else:
        web_ui_message = 'off'

    click.echo(
        f'* Starting BentoML YataiService gRPC Server\n'
        f'* Debug mode: { "on" if debug_mode else "off"}\n'
        f'* Web UI: {web_ui_message}\n'
        f'* Running on 127.0.0.1:{grpc_port} (Press CTRL+C to quit)\n'
        f'* Help and instructions: '
        f'https://docs.bentoml.org/en/latest/guides/yatai_service.html\n'
        f'{f"* Web server log can be found here: {web_ui_log_path}" if with_ui else ""}'
        f'\n-----\n'
        f'* Usage in Python:\n'
        f'*  bento_svc.save(yatai_url="127.0.0.1:{grpc_port}")\n'
        f'*  from bentoml.yatai.client import get_yatai_client\n'
        f'*  get_yatai_client("127.0.0.1:{grpc_port}").repository.list()\n'
        f'* Usage in CLI:\n'
        f'*  bentoml list --yatai-url=127.0.0.1:{grpc_port}\n'
        f'*  bentoml containerize IrisClassifier:latest --yatai-url=127.0.0.1:'
        f'{grpc_port}\n'
        f'*  bentoml push IrisClassifier:20200918001645_CD2886 --yatai-url=127.0.0.1:'
        f'{grpc_port}\n'
        f'*  bentoml pull IrisClassifier:20200918001645_CD2886 --yatai-url=127.0.0.1:'
        f'{grpc_port}\n'
        f'*  bentoml retrieve IrisClassifier:20200918001645_CD2886 '
        f'--yatai-url=127.0.0.1:{grpc_port} --target_dir="/tmp/foo/bar"\n'
        f'*  bentoml delete IrisClassifier:20200918001645_CD2886 '
        f'--yatai-url=127.0.0.1:{grpc_port}\n'
        # TODO: simplify the example usage here once related documentation is ready
    )

    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        logger.info("Terminating YataiService gRPC server..")
        server.stop(grace=None)