def serving_entrypoint():
    """Start the inference server.

    NOTE: If the endpoint is multi-model, MXNet Model Server (MMS) is used as the
    base server. Otherwise, Gunicorn is used as the base server.
    """
    if is_multi_model():
        start_mxnet_model_server()
    else:
        server.start(env.ServingEnv().framework_module)
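
# A minimal sketch of what is_multi_model() is assumed to do: check whether SageMaker
# started the container in multi-model mode. The SAGEMAKER_MULTI_MODEL environment
# variable, its value format, and the reliance on a module-level `import os` are
# assumptions for illustration, not taken from this module.
#
# def is_multi_model():
#     """Return True if the container was launched as a multi-model endpoint."""
#     return os.environ.get("SAGEMAKER_MULTI_MODEL", "").lower() == "true"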
def main():
    """Start the inference server for the framework module in the serving environment."""
    server.start(env.ServingEnv().framework_module)
def worker():
    """Start an inference server for the framework module in the serving environment."""
    server.start(env.ServingEnv().framework_module)