def load(bento_service_cls, path=None):
    """Load a saved BentoService of type `bento_service_cls`.

    :param bento_service_cls: the BentoService subclass to load
    :param path: BentoArchive path (local or s3); ignored when the class was
        pip-installed (it then carries its own `_bento_module_path`)
    :return: a BentoService instance with its version set from bentoml config
    :raises BentoMLException: when no path is given for a non-pip-installed class
    """
    # TODO: add model.env.verify() to check dependencies and python version etc
    if bento_service_cls._bento_module_path is not None:
        # When calling load from pip installed bento model, use installed
        # python package for loading and the same path for '/artifacts'
        # TODO: warn user that 'path' parameter is ignored if it's not None here
        path = bento_service_cls._bento_module_path
        artifacts_path = path
    else:
        if path is None:
            raise BentoMLException("Loading path is required for BentoArchive: {}.".format(
                bento_service_cls.name()))

        # When calling load on generated archive directory, look for /artifacts
        # directory under module sub-directory
        if is_s3_url(path):
            temporary_path = tempfile.mkdtemp()
            download_from_s3(path, temporary_path)
            # Use local temp path for the following loading operations
            path = temporary_path

        artifacts_path = os.path.join(path, bento_service_cls.name())

    bentoml_config = load_bentoml_config(path)

    bento_service = bento_service_cls.load(artifacts_path)
    # Record the saved version on the instance so callers can inspect it
    bento_service._version = bentoml_config['service_version']
    return bento_service
def resolve_bundle_path(bento, pip_installed_bundle_path):
    """Resolve a CLI `bento` argument to a loadable saved-bundle location.

    :param bento: a local directory path, an s3 path, or a "name:version" tag
        to look up in the Yatai repository
    :param pip_installed_bundle_path: set when the command runs from a
        pip-installed BentoService package; takes precedence, and `bento`
        must then be None
    :return: a path or URI that the bundle loader understands
    :raises BentoMLException: when the BentoService can not be found
    """
    if pip_installed_bundle_path:
        assert (
            bento is None
        ), "pip installed BentoService commands should not have Bento argument"
        return pip_installed_bundle_path

    if os.path.isdir(bento) or is_s3_url(bento):
        # bundler already support loading local and s3 path
        return bento

    elif ":" in bento:
        # assuming passing in BentoService in the form of Name:Version tag
        yatai_client = YataiClient()
        name, version = bento.split(':')
        get_bento_result = yatai_client.repository.get(name, version)
        if get_bento_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                get_bento_result.status)
            raise BentoMLException(
                f'BentoService {name}:{version} not found - '
                f'{error_code}:{error_message}')
        return get_bento_result.bento.uri.uri
    else:
        raise BentoMLException(
            f'BentoService "{bento}" not found - either specify the file path of '
            f'the BentoService saved bundle, or the BentoService id in the form of '
            f'"name:version"')
def resolve_bundle_path(bento, pip_installed_bundle_path, yatai_url=None):
    """Resolve a CLI `bento` argument to a loadable saved-bundle location.

    :param bento: a local directory, an s3/gcs path, or a "name:version" tag
    :param pip_installed_bundle_path: set when running from a pip-installed
        BentoService package; takes precedence, and `bento` must then be None
    :param yatai_url: optional Yatai server address used to look up tags
    :return: a path or URI that saved_bundle loading understands; presigned
        s3/gcs URLs are preferred when the repository provides them
    :raises BentoMLException: when the BentoService can not be found
    """
    from bentoml.yatai.client import get_yatai_client
    from bentoml.exceptions import BentoMLException

    if pip_installed_bundle_path:
        assert (
            bento is None
        ), "pip installed BentoService commands should not have Bento argument"
        return pip_installed_bundle_path

    if os.path.isdir(bento) or is_s3_url(bento) or is_gcs_url(bento):
        # saved_bundle already support loading local, s3 path and gcs path
        return bento

    elif ":" in bento:
        # assuming passing in BentoService in the form of Name:Version tag
        yatai_client = get_yatai_client(yatai_url)

        bento_pb = yatai_client.repository.get(bento)
        if bento_pb.uri.s3_presigned_url:
            # Use s3 presigned URL for downloading the repository if it is presented
            return bento_pb.uri.s3_presigned_url
        if bento_pb.uri.gcs_presigned_url:
            return bento_pb.uri.gcs_presigned_url
        else:
            return bento_pb.uri.uri
    else:
        raise BentoMLException(
            f'BentoService "{bento}" not found - either specify the file path of '
            f"the BentoService saved bundle, or the BentoService id in the form of "
            f'"name:version"')
def _resolve_remote_bundle_path(bundle_path):
    """Download a remote saved-bundle tarball and yield a local path to it.

    Supports s3:// URLs (via boto3) and http(s) URLs (via requests). The
    tarball is extracted into a temporary directory that is removed when the
    generator is closed, so the yielded path is only valid inside the
    surrounding `with`/generator scope.

    :param bundle_path: remote location of a .tar.gz saved bundle
    :raises BentoMLException: when the scheme is unsupported or the HTTP
        download does not return 200
    """
    if is_s3_url(bundle_path):
        import boto3

        parsed_url = urlparse(bundle_path)
        bucket_name = parsed_url.netloc
        object_name = parsed_url.path.lstrip('/')

        s3 = boto3.client('s3')
        fileobj = io.BytesIO()
        s3.download_fileobj(bucket_name, object_name, fileobj)
        fileobj.seek(0, 0)
    elif _is_http_url(bundle_path):
        import requests

        response = requests.get(bundle_path)
        # Fail loudly on HTTP errors instead of trying to untar an error page
        if response.status_code != 200:
            raise BentoMLException(
                f"Error retrieving BentoService bundle. "
                f"{response.status_code}: {response.text}")
        fileobj = io.BytesIO()
        fileobj.write(response.content)
        fileobj.seek(0, 0)
    else:
        raise BentoMLException(
            f"Saved bundle path: '{bundle_path}' is not supported")

    with tarfile.open(mode="r:gz", fileobj=fileobj) as tar:
        with tempfile.TemporaryDirectory() as tmpdir:
            filename = tar.getmembers()[0].name
            # NOTE(review): extractall on a downloaded archive is vulnerable
            # to path traversal if the bundle source is untrusted
            tar.extractall(path=tmpdir)
            yield os.path.join(tmpdir, filename)
def load_bento_service_class(archive_path):
    """
    Load a BentoService class from saved archive in given path

    :param archive_path: A BentoArchive path generated from BentoService.save call
        or the path to pip installed BentoArchive directory
    :return: BentoService class

    Note: this mutates class attributes (`_bento_archive_path`,
    `_bento_service_version`) on the loaded class and temporarily modifies
    sys.path while importing the archived module.
    """
    if is_s3_url(archive_path):
        tempdir = tempfile.mkdtemp()
        download_from_s3(archive_path, tempdir)
        archive_path = tempdir

    config = load_bentoml_config(archive_path)

    # Load target module containing BentoService class from given path
    module_file_path = os.path.join(archive_path, config['service_name'],
                                    config['module_file'])
    if not os.path.isfile(module_file_path):
        # Try loading without service_name prefix, for loading from a installed PyPi
        module_file_path = os.path.join(archive_path, config['module_file'])
    if not os.path.isfile(module_file_path):
        raise BentoMLException(
            'Can not locate module_file {} in archive {}'.format(
                config['module_file'], archive_path))

    # Prepend archive_path to sys.path for loading extra python dependencies
    sys.path.insert(0, archive_path)

    module_name = config['module_name']
    if module_name in sys.modules:
        # module already loaded, TODO: add warning
        module = sys.modules[module_name]
    elif sys.version_info >= (3, 5):
        # Python >= 3.5: load the module from an explicit file location
        import importlib.util
        spec = importlib.util.spec_from_file_location(module_name, module_file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
    elif sys.version_info >= (3, 3):
        # Python 3.3/3.4 fallback
        from importlib.machinery import SourceFileLoader
        # pylint:disable=deprecated-method
        module = SourceFileLoader(module_name, module_file_path).load_module(module_name)
        # pylint:enable=deprecated-method
    else:
        # Python 2 fallback
        import imp
        module = imp.load_source(module_name, module_file_path)

    # Remove archive_path from sys.path to avoid import naming conflicts
    sys.path.remove(archive_path)

    model_service_class = module.__getattribute__(config['service_name'])
    # Set _bento_archive_path, which tells BentoService where to load its artifacts
    model_service_class._bento_archive_path = archive_path
    # Set cls._version, service instance can access it via svc.version
    model_service_class._bento_service_version = config['service_version']

    return model_service_class
def resolve_bundle_path(
    bento: str,
    pip_installed_bundle_path: Optional[str] = None,
    yatai_url: Optional[str] = None,
) -> str:
    """Resolve a CLI `bento` argument to a loadable saved-bundle location.

    :param bento: a local directory, an s3/gcs path, or a "name:version" tag
    :param pip_installed_bundle_path: set when running from a pip-installed
        BentoService package; takes precedence, and `bento` must then be None
    :param yatai_url: optional Yatai server address used to look up tags
    :return: a path or URI that saved_bundle loading understands
    :raises BentoMLException: when the BentoService can not be found
    """
    from bentoml.exceptions import BentoMLException
    from bentoml.yatai.client import get_yatai_client

    if pip_installed_bundle_path:
        assert (
            bento is None
        ), "pip installed BentoService commands should not have Bento argument"
        return pip_installed_bundle_path

    if os.path.isdir(bento) or is_s3_url(bento) or is_gcs_url(bento):
        # saved_bundle already support loading local, s3 path and gcs path
        return bento

    elif ":" in bento:
        # assuming passing in BentoService in the form of Name:Version tag
        yatai_client = get_yatai_client(yatai_url)
        bento_pb = yatai_client.repository.get(bento)
        return resolve_bento_bundle_uri(bento_pb)
    else:
        raise BentoMLException(
            f'BentoService "{bento}" not found - either specify the file path of '
            f"the BentoService saved bundle, or the BentoService id in the form of "
            f'"name:version"')
def from_archive(cls, path):
    """Instantiate this BentoService class from a saved BentoArchive.

    :param path: local file path or s3 path of a BentoArchive, or a
        pip-installed BentoService package directory
    :return: an instance of `cls` with artifacts loaded from the archive
    :raises BentoMLException: when the class was already bound to a different
        archive path, or the archive's service name does not match `cls`
    """
    from bentoml.archive import load_bentoml_config

    # TODO: add model.env.verify() to check dependencies and python version etc
    if cls._bento_archive_path is not None and cls._bento_archive_path != path:
        # Fix: a space was missing between the two concatenated string parts,
        # producing "differentarchive path" in the error message
        raise BentoMLException(
            "Loaded BentoArchive(from {}) can't be loaded again from a different "
            "archive path {}".format(cls._bento_archive_path, path))

    if is_s3_url(path):
        temporary_path = tempfile.mkdtemp()
        download_from_s3(path, temporary_path)
        # Use local temp path for the following loading operations
        path = temporary_path

    artifacts_path = path
    # For pip installed BentoService, artifacts directory is located at
    # 'package_path/artifacts/', but for loading from BentoArchive, it is
    # in 'path/{service_name}/artifacts/'
    if not os.path.isdir(os.path.join(path, 'artifacts')):
        artifacts_path = os.path.join(path, cls.name())

    bentoml_config = load_bentoml_config(path)
    # TODO: check archive type and allow loading archive only
    if bentoml_config['service_name'] != cls.name():
        raise BentoMLException(
            'BentoService name does not match with BentoML Archive in path: {}'
            .format(path))

    artifacts = ArtifactCollection.load(artifacts_path, cls._artifacts_spec)
    svc = cls(artifacts)
    return svc
def save(bento_service, dst, version=None):
    """
    Save given BentoService along with all artifacts to target path

    :param bento_service: the BentoService instance to persist
    :param dst: destination base path, either local file system or s3
    :param version: optional version string; auto-generated when omitted, and
        used as the PATCH field when the service declares MAJOR/MINOR via @ver
    :return: the full saved path (dst/service_name/version)
    """
    if version is None:
        version = _generate_new_version_str()
    _validate_version_str(version)

    if bento_service._version_major is not None and bento_service._version_minor is not None:
        # BentoML uses semantic versioning for BentoService distribution
        # when user specified the MAJOR and MINOR version number along with
        # the BentoService class definition with '@ver' decorator.
        # The parameter version(or auto generated version) here will be used as
        # PATCH field in the final version:
        version = '.'.join(
            [str(bento_service._version_major),
             str(bento_service._version_minor), version])

    # Full path containing saved BentoArchive, i.e. the dst path with service name
    # and service version as prefix. e.g.:
    # - s3://my-bucket/base_path => s3://my-bucket/base_path/service_name/version/
    # - /tmp/my_bento_archive/ => /tmp/my_bento_archive/service_name/version/
    full_saved_path = os.path.join(dst, bento_service.name, version)

    if is_s3_url(dst):
        # Build the archive locally first, then upload the finished tree to s3
        with TempDirectory() as tempdir:
            _save(bento_service, tempdir, version)
            upload_to_s3(full_saved_path, tempdir)
    else:
        _save(bento_service, dst, version)

    return full_saved_path
def __init__(self, base_url=None):
    # base_url is either a local file system path or an s3 path; when not
    # given, fall back to the configured default repository base url
    if base_url is None:
        base_url = config().get('default_repository_base_url')

    # Dispatch to the s3-backed or local-filesystem repository implementation
    if is_s3_url(base_url):
        self._repo = _S3BentoRepository(base_url)
    else:
        self._repo = _LocalBentoRepository(base_url)
def handle_cli(self, args, func):
    """Parse CLI args into a pandas object, invoke `func`, and print the result.

    --input may be a local file, s3 url, or http url (.csv/.json), or an
    inline JSON string. --output selects str or json rendering of the result.
    Raises BadInput when the input can not be parsed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--input", required=True)
    parser.add_argument("-o", "--output", default="str", choices=["str", "json"])
    parser.add_argument(
        "--orient",
        default=self.orient,
        choices=PANDAS_DATAFRAME_TO_DICT_ORIENT_OPTIONS,
    )
    parser.add_argument(
        "--output_orient",
        default=self.output_orient,
        choices=PANDAS_DATAFRAME_TO_DICT_ORIENT_OPTIONS,
    )
    parsed_args = parser.parse_args(args)

    orient = parsed_args.orient
    output_orient = parsed_args.output_orient
    cli_input = parsed_args.input

    if os.path.isfile(cli_input) or is_s3_url(cli_input) or is_url(
            cli_input):
        # File-like input: dispatch on extension
        if cli_input.endswith(".csv"):
            df = pd.read_csv(cli_input)
        elif cli_input.endswith(".json"):
            df = pd.read_json(cli_input, orient=orient, typ=self.typ, dtype=False)
        else:
            raise BadInput(
                "Input file format not supported, BentoML cli only accepts .json "
                "and .csv file")
    else:
        # Assuming input string is JSON format
        try:
            df = pd.read_json(cli_input, orient=orient, typ=self.typ, dtype=False)
        except ValueError as e:
            raise BadInput(
                "Unexpected input format, BentoML DataframeHandler expects json "
                "string as input: {}".format(e))

    if self.typ == "frame" and self.input_dtypes is not None:
        # Validate declared input columns are present before calling the API func
        _check_dataframe_column_contains(self.input_dtypes, df)

    result = func(df)
    if parsed_args.output == 'json':
        result = api_func_result_to_json(
            result, pandas_dataframe_orient=output_orient)
    else:
        result = str(result)
    print(result)
def yatai_service_start(
    db_url,
    repo_base_url,
    grpc_port,
    ui_port,
    ui,
    web_prefix_path,
    repository_type,
    file_system_directory,
    s3_url,
    s3_endpoint_url,
    gcs_url,
):
    """Start the Yatai gRPC server after resolving repository configuration.

    The deprecated --repo-base-url option is mapped onto repository_type plus
    the matching s3/gcs/file-system option; each repository type then requires
    its corresponding URL/directory option, otherwise an error is logged and
    the server is not started.
    """
    from bentoml.utils.s3 import is_s3_url
    from bentoml.utils.gcs import is_gcs_url

    if repo_base_url:
        logger.warning(
            "Option --repo-base-url has been deprecated but is still supported "
            "in the current release. Consider using --repository-type and its "
            "corresponding options in the upcoming releases. ")

        # Infer the repository type from the deprecated base-url's scheme
        if is_s3_url(repo_base_url):
            repository_type = YATAI_REPOSITORY_S3
            s3_url = repo_base_url
        elif is_gcs_url(repo_base_url):
            repository_type = YATAI_REPOSITORY_GCS
            gcs_url = repo_base_url
        else:
            repository_type = YATAI_REPOSITORY_FILE_SYSTEM
            file_system_directory = repo_base_url

    if repository_type == YATAI_REPOSITORY_S3 and s3_url is None:
        logger.error("'--s3-url' must be specified for S3 repository type")
        return
    elif repository_type == YATAI_REPOSITORY_GCS and gcs_url is None:
        logger.error(
            "'--gcs-url' must be specified for GCS repository type")
        return
    elif (repository_type == YATAI_REPOSITORY_FILE_SYSTEM
          and file_system_directory is None):
        logger.error(
            "'--file-system-directory' must be specified for file system "
            "repository type")
        return
    else:
        start_yatai_service_grpc_server(
            db_url=db_url,
            grpc_port=grpc_port,
            ui_port=ui_port,
            with_ui=ui,
            base_url=web_prefix_path,
            repository_type=repository_type,
            file_system_directory=file_system_directory,
            s3_url=s3_url,
            s3_endpoint_url=s3_endpoint_url,
            gcs_url=gcs_url,
        )
def load_model_service(model_path):
    """Load a model service from a local directory or an s3 path.

    S3 paths are first downloaded into a scratch directory; local relative
    paths are normalized to absolute before loading.
    """
    if is_s3_url(model_path):
        # Remote archive: fetch it locally, then load from the scratch dir
        local_dir = tempfile.mkdtemp()
        download_from_s3(model_path, local_dir)
        return load(local_dir)

    if not os.path.isabs(model_path):
        model_path = os.path.abspath(model_path)
    return load(model_path)
def __init__(self, base_url=None, s3_endpoint_url=None):
    """
    :param base_url: either a local file system path or a s3-compatible path
        such as s3://my-bucket/some-prefix/; defaults to the configured
        'default_repository_base_url' when None
    :param s3_endpoint_url: configuring S3Repository to talk to a specific s3
        endpoint (ignored for local repositories)
    """
    if base_url is None:
        base_url = config().get('default_repository_base_url')

    # Dispatch to the s3-backed or local-filesystem repository implementation
    if is_s3_url(base_url):
        self._repo = S3Repository(base_url, s3_endpoint_url)
    else:
        self._repo = LocalRepository(base_url)
def save(bento_service, dst, version=None):
    """Save given BentoService along with all its artifacts, source code and
    dependencies to target path

    Args:
        bento_service (bentoml.service.BentoService): a Bento Service instance
        dst (str): Destination of where the bento service will be saved. It
            could be a local file path or a s3 path
        version (:obj:`str`, optional): version text to use for saved archive;
            auto-generated when omitted, and used as the PATCH field when the
            service declares MAJOR/MINOR via the '@ver' decorator

    Returns:
        string: The complete path of saved Bento service.
    """
    if version is None:
        version = _generate_new_version_str()
    _validate_version_str(version)

    if (bento_service._version_major is not None
            and bento_service._version_minor is not None):
        # BentoML uses semantic versioning for BentoService distribution
        # when user specified the MAJOR and MINOR version number along with
        # the BentoService class definition with '@ver' decorator.
        # The parameter version(or auto generated version) here will be used as
        # PATCH field in the final version:
        version = ".".join([
            str(bento_service._version_major),
            str(bento_service._version_minor),
            version,
        ])

    # Full path containing saved BentoArchive, i.e. the dst path with service name
    # and service version as prefix. e.g.:
    # - s3://my-bucket/base_path => s3://my-bucket/base_path/service_name/version/
    # - /tmp/my_bento_archive/ => /tmp/my_bento_archive/service_name/version/
    full_saved_path = os.path.join(dst, bento_service.name, version)

    if is_s3_url(dst):
        # Build the archive locally first, then upload the finished tree to s3
        with TempDirectory() as tempdir:
            _save(bento_service, tempdir, version)
            upload_to_s3(full_saved_path, tempdir)
    else:
        _save(bento_service, dst, version)

    LOG.info("BentoService %s:%s saved to %s", bento_service.name, version,
             full_saved_path)
    return full_saved_path
def handle_cli(self, args, func):
    """Parse CLI args into a pandas object, invoke `func`, and print the result.

    --input may be a local file, s3 url, or http url (.csv/.json), or an
    inline JSON string. --output selects str/json/yaml rendering.
    Raises ValueError when the input can not be parsed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', required=True)
    parser.add_argument('-o', '--output', default="str",
                        choices=['str', 'json', 'yaml'])
    parser.add_argument('--orient', default=self.orient)
    parser.add_argument('--output_orient', default=self.output_orient)
    parsed_args = parser.parse_args(args)

    orient = parsed_args.orient
    output_orient = parsed_args.output_orient
    cli_input = parsed_args.input

    if os.path.isfile(cli_input) or is_s3_url(cli_input) or is_url(
            cli_input):
        # File-like input: dispatch on extension
        if cli_input.endswith('.csv'):
            df = pd.read_csv(cli_input)
        elif cli_input.endswith('.json'):
            df = pd.read_json(cli_input, orient=orient, typ=self.typ, dtype=False)
        else:
            raise ValueError(
                "Input file format not supported, BentoML cli only accepts .json and .csv file"
            )
    else:
        # Assuming input string is JSON format
        try:
            df = pd.read_json(cli_input, orient=orient, typ=self.typ, dtype=False)
        except ValueError as e:
            # Fix: a space was missing between the two concatenated string
            # parts, producing "json string asinput:" in the error message
            raise ValueError(
                "Unexpected input format, BentoML DataframeHandler expects json "
                "string as input: {}".format(e))

    if self.typ == 'frame' and self.input_columns is not None:
        # Validate declared input columns are present before calling the API func
        check_dataframe_column_contains(self.input_columns, df)

    result = func(df)
    result = get_output_str(result, parsed_args.output, output_orient)
    print(result)
def load(path, lazy_load=False):
    """
    Load a BentoService or BentoModel from saved archive in given path

    :param path: A BentoArchive path generated from BentoService.save call
        (local file system or s3)
    :param lazy_load: when True, return the wrapper without loading artifacts;
        the caller must invoke .load() itself later
    :return: BentoService (wrapped in _LoadedBentoServiceWrapper)
    """
    if is_s3_url(path):
        tempdir = tempfile.mkdtemp()
        download_from_s3(path, tempdir)
        path = tempdir

    config = load_bentoml_config(path)

    # Load target module containing BentoService class from given path
    module_file_path = os.path.join(path, config['service_name'],
                                    config['module_file'])

    module_name = config['module_name']
    if module_name in sys.modules:
        # module already loaded, TODO: add warning
        module = sys.modules[module_name]
    elif sys.version_info >= (3, 5):
        # Python >= 3.5: load the module from an explicit file location
        import importlib.util
        spec = importlib.util.spec_from_file_location(module_name, module_file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
    elif sys.version_info >= (3, 3):
        # Python 3.3/3.4 fallback
        from importlib.machinery import SourceFileLoader
        # pylint:disable=deprecated-method
        module = SourceFileLoader(module_name, module_file_path).load_module(module_name)
        # pylint:enable=deprecated-method
    else:
        # Python 2 fallback
        import imp
        module = imp.load_source(module_name, module_file_path)

    model_service_class = module.__getattribute__(config['service_name'])
    loaded_model = _LoadedBentoServiceWrapper(model_service_class, path, config)

    if not lazy_load:
        loaded_model.load()

    return loaded_model
def _resolve_remote_bundle_path(bundle_path):
    """Download a remote saved-bundle tarball and yield a local path to it.

    Supports s3:// (boto3), gs:// (google-cloud-storage) and http(s)
    (requests) locations. The tarball is extracted into a temporary directory
    that is removed when the generator is closed, so the yielded path is only
    valid inside the generator's lifetime.

    :param bundle_path: remote location of a .tar.gz saved bundle
    :raises BentoMLException: when the scheme is unsupported, the gcs client
        library is missing, or the HTTP download fails
    """
    if is_s3_url(bundle_path):
        import boto3

        parsed_url = urlparse(bundle_path)
        bucket_name = parsed_url.netloc
        object_name = parsed_url.path.lstrip('/')

        s3 = boto3.client('s3')
        fileobj = io.BytesIO()
        s3.download_fileobj(bucket_name, object_name, fileobj)
        fileobj.seek(0, 0)
    elif is_gcs_url(bundle_path):
        try:
            from google.cloud import storage
        except ImportError:
            raise BentoMLException(
                '"google-cloud-storage" package is required. You can install it with '
                'pip: "pip install google-cloud-storage"'
            )

        gcs = storage.Client()
        fileobj = io.BytesIO()
        gcs.download_blob_to_file(bundle_path, fileobj)
        fileobj.seek(0, 0)
    elif _is_http_url(bundle_path):
        import requests

        response = requests.get(bundle_path)
        if response.status_code != 200:
            raise BentoMLException(
                f"Error retrieving BentoService bundle. "
                f"{response.status_code}: {response.text}"
            )
        fileobj = io.BytesIO()
        fileobj.write(response.content)
        fileobj.seek(0, 0)
    else:
        raise BentoMLException(f"Saved bundle path: '{bundle_path}' is not supported")

    with tarfile.open(mode="r:gz", fileobj=fileobj) as tar:
        with tempfile.TemporaryDirectory() as tmpdir:
            filename = tar.getmembers()[0].name
            # NOTE(review): extractall on a downloaded archive is vulnerable
            # to path traversal if the bundle source is untrusted
            tar.extractall(path=tmpdir)
            yield os.path.join(tmpdir, filename)
def resolve_bundle_path(bento, pip_installed_bundle_path):
    """Resolve a CLI `bento` argument to a loadable saved-bundle location.

    :param bento: a local directory, an s3/gcs path, or a "name:version" tag
        to look up in the default Yatai repository
    :param pip_installed_bundle_path: set when running from a pip-installed
        BentoService package; takes precedence, and `bento` must then be None
    :return: a path or URI that saved_bundle loading understands; presigned
        s3/gcs URLs are preferred when the repository provides them
    :raises BentoMLException: when the BentoService can not be found
    """
    from bentoml.exceptions import BentoMLException

    if pip_installed_bundle_path:
        assert (
            bento is None
        ), "pip installed BentoService commands should not have Bento argument"
        return pip_installed_bundle_path

    if os.path.isdir(bento) or is_s3_url(bento) or is_gcs_url(bento):
        # saved_bundle already support loading local, s3 path and gcs path
        return bento

    elif ":" in bento:
        # assuming passing in BentoService in the form of Name:Version tag
        yatai_client = get_default_yatai_client()
        name, version = bento.split(":")
        get_bento_result = yatai_client.repository.get(name, version)
        if get_bento_result.status.status_code != yatai_proto.status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                get_bento_result.status
            )
            raise BentoMLException(
                f"BentoService {name}:{version} not found - "
                f"{error_code}:{error_message}"
            )
        if get_bento_result.bento.uri.s3_presigned_url:
            # Use s3 presigned URL for downloading the repository if it is presented
            return get_bento_result.bento.uri.s3_presigned_url
        if get_bento_result.bento.uri.gcs_presigned_url:
            return get_bento_result.bento.uri.gcs_presigned_url
        else:
            return get_bento_result.bento.uri.uri
    else:
        raise BentoMLException(
            f'BentoService "{bento}" not found - either specify the file path of '
            f"the BentoService saved bundle, or the BentoService id in the form of "
            f'"name:version"'
        )
def handle_cli(self, args, func):
    """Parse CLI args into a pandas object, invoke `func`, and hand the
    result to the output adapter.

    --input may be a local file, s3 url, or http url (.csv/.json), or an
    inline JSON string. Unknown args are forwarded to the output adapter.
    Raises BadInput when the input can not be parsed.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--input", required=True)
    parser.add_argument(
        "--orient",
        default=self.orient,
        choices=PANDAS_DATAFRAME_TO_JSON_ORIENT_OPTIONS,
    )
    parsed_args, unknown_args = parser.parse_known_args(args)

    orient = parsed_args.orient
    cli_input = parsed_args.input

    if os.path.isfile(cli_input) or is_s3_url(cli_input) or is_url(cli_input):
        # File-like input: dispatch on extension
        if cli_input.endswith(".csv"):
            df = pd.read_csv(cli_input)
        elif cli_input.endswith(".json"):
            df = pd.read_json(cli_input, orient=orient, typ=self.typ)
        else:
            raise BadInput(
                "Input file format not supported, BentoML cli only accepts .json "
                "and .csv file"
            )
    else:
        # Assuming input string is JSON format
        try:
            df = pd.read_json(cli_input, orient=orient, typ=self.typ)
        except ValueError as e:
            raise BadInput(
                "Unexpected input format, BentoML DataframeInput expects json "
                "string as input: {}".format(e)
            )

    if self.typ == "frame" and self.input_dtypes is not None:
        # Validate declared input columns are present before calling the API func
        check_dataframe_column_contains(self.input_dtypes, df)

    result = func(df)
    self.output_adapter.to_cli(result, unknown_args)
def save(bento_service, dst, version=None, pypi_package_version="1.0.0"):
    """
    Save given BentoService along with all artifacts to target path

    :param bento_service: the BentoService instance to persist
    :param dst: destination base path, either local file system or s3
    :param version: optional version string; auto-generated when omitted
    :param pypi_package_version: version used for the generated pip package
    :return: the full saved path (dst/service_name/version)
    """
    if version is None:
        version = _generate_new_version_str()
    _validate_version_str(version)

    # Full path containing saved BentoArchive, i.e. the dst path with service name
    # and service version as prefix. e.g.:
    # - s3://my-bucket/base_path => s3://my-bucket/base_path/service_name/version/
    # - /tmp/my_bento_archive/ => /tmp/my_bento_archive/service_name/version/
    full_saved_path = os.path.join(dst, bento_service.name, version)

    if is_s3_url(dst):
        # Build the archive locally first, then upload the finished tree to s3
        with TempDirectory() as tempdir:
            _save(bento_service, tempdir, version, pypi_package_version)
            upload_to_s3(full_saved_path, tempdir)
    else:
        _save(bento_service, dst, version, pypi_package_version)

    return full_saved_path
def from_archive(cls, path):
    """Instantiate this BentoService class from a saved BentoArchive.

    :param path: local file path or s3 path of a BentoArchive, or a
        pip-installed BentoService package directory
    :return: an instance of `cls` with artifacts loaded from the archive
    :raises BentoMLException: when the class was already bound to a different
        archive path, the archive's service name does not match `cls`, or the
        archive kind is not "BentoService"
    """
    from bentoml.archive import load_bentoml_config

    if cls._bento_archive_path is not None and cls._bento_archive_path != path:
        # Fix: a space was missing between the two concatenated string parts,
        # producing "differentarchive path" in the error message
        raise BentoMLException(
            "Loaded BentoArchive(from {}) can't be loaded again from a different "
            "archive path {}".format(cls._bento_archive_path, path))

    if is_s3_url(path):
        temporary_path = tempfile.mkdtemp()
        download_from_s3(path, temporary_path)
        # Use local temp path for the following loading operations
        path = temporary_path

    artifacts_path = path
    # For pip installed BentoService, artifacts directory is located at
    # 'package_path/artifacts/', but for loading from BentoArchive, it is
    # in 'path/{service_name}/artifacts/'
    if not os.path.isdir(os.path.join(path, "artifacts")):
        artifacts_path = os.path.join(path, cls.name())

    bentoml_config = load_bentoml_config(path)
    if bentoml_config["metadata"]["service_name"] != cls.name():
        raise BentoMLException(
            "BentoService name does not match with BentoArchive in path: {}"
            .format(path))
    if bentoml_config["kind"] != "BentoService":
        raise BentoMLException(
            "BentoArchive type '{}' can not be loaded as a BentoService".
            format(bentoml_config["kind"]))

    artifacts = ArtifactCollection.load(artifacts_path, cls._artifacts_spec)
    svc = cls(artifacts)
    return svc
def _is_remote_path(bundle_path):
    # In this variant only s3 paths are treated as remote bundle locations
    return is_s3_url(bundle_path)
def _is_remote_path(bundle_path):
    """Return True when bundle_path is a string naming a remote bundle
    location (s3, gcs, or http/https)."""
    if not isinstance(bundle_path, str):
        return False
    # Checks run in the same order as the original or-chain, short-circuiting
    # on the first match
    return any(
        looks_remote(bundle_path)
        for looks_remote in (is_s3_url, is_gcs_url, _is_http_url)
    )
def _is_remote_path(bundle_path):
    """Return True when bundle_path names a remote bundle location
    (s3, gcs, or http/https)."""
    remote_checks = (is_s3_url, is_gcs_url, _is_http_url)
    # Short-circuits on the first matching scheme, like the original or-chain
    return any(check(bundle_path) for check in remote_checks)
def save(bento_service, dst, version=None, pypi_package_version="1.0.0"):
    """
    Save given BentoService along with all artifacts to target path

    Writes the full archive layout (README.md, artifacts, env files,
    setup.py, MANIFEST.in, Dockerfile, bentoml.yml and a copy of the user's
    module) under dst/service_name/version/, uploading to s3 when dst is an
    s3 url.

    :param bento_service: the BentoService instance to persist
    :param dst: destination base path, local file system or s3
    :param version: optional version string; auto-generated when omitted
    :param pypi_package_version: version used for the generated pip package
    :return: the saved path (s3 url or local directory)
    :raises ValueError: when the version already exists under dst
    """
    if version is None:
        version = _generate_new_version_str()
    _validate_version_str(version)

    s3_url = None
    if is_s3_url(dst):
        s3_url = os.path.join(dst, bento_service.name, version)
        # TODO: check s3_url not exist, otherwise raise exception
        # Build the archive in a local scratch dir, upload to s3 at the end
        temp_dir = tempfile.mkdtemp()
        Path(temp_dir, bento_service.name).mkdir(parents=True, exist_ok=True)
        # Update path to subfolder in the form of 'base/service_name/version/'
        path = os.path.join(temp_dir, bento_service.name, version)
    else:
        Path(os.path.join(dst), bento_service.name).mkdir(parents=True, exist_ok=True)
        # Update path to subfolder in the form of 'base/service_name/version/'
        path = os.path.join(dst, bento_service.name, version)

    if os.path.exists(path):
        raise ValueError("Version {version} in Path: {dst} already "
                         "exist.".format(version=version, dst=dst))

    os.mkdir(path)
    module_base_path = os.path.join(path, bento_service.name)
    os.mkdir(module_base_path)

    # write README.md with user model's docstring
    if bento_service.__class__.__doc__:
        model_description = bento_service.__class__.__doc__.strip()
    else:
        model_description = DEFAULT_BENTO_ARCHIVE_DESCRIPTION
    with open(os.path.join(path, 'README.md'), 'w') as f:
        f.write(model_description)

    # save all model artifacts to 'base_path/name/artifacts/' directory
    bento_service.artifacts.save(module_base_path)

    # write conda environment, requirement.txt
    bento_service.env.save(path)

    # TODO: add bentoml.find_packages helper for more fine grained control over
    # this process, e.g. packages=find_packages(base, [], exclude=[],
    # used_module_only=True)
    # copy over all custom model code
    module_name, module_file = copy_used_py_modules(bento_service.__class__.__module__,
                                                    os.path.join(path, bento_service.name))

    if os.path.isabs(module_file):
        # Normalize an absolute module file path to a package-relative one
        module_file = module_name.replace('.', os.sep) + '.py'

    # create __init__.py
    with open(os.path.join(path, bento_service.name, '__init__.py'), "w") as f:
        f.write(
            INIT_PY_TEMPLATE.format(service_name=bento_service.name,
                                    module_name=module_name,
                                    pypi_package_version=pypi_package_version))

    # write setup.py, make exported model pip installable
    setup_py_content = BENTO_MODEL_SETUP_PY_TEMPLATE.format(
        name=bento_service.name, pypi_package_version=pypi_package_version,
        long_description=model_description)
    with open(os.path.join(path, 'setup.py'), 'w') as f:
        f.write(setup_py_content)

    with open(os.path.join(path, 'MANIFEST.in'), 'w') as f:
        f.write(MANIFEST_IN_TEMPLATE.format(service_name=bento_service.name))

    # write Dockerfile
    with open(os.path.join(path, 'Dockerfile'), 'w') as f:
        f.write(
            BENTO_SERVER_SINGLE_MODEL_DOCKERFILE_TEMPLATE.format(
                conda_env_name=bento_service.env.get_conda_env_name()))

    # write bentoml.yml
    bentoml_yml_content = BENTOML_CONFIG_YAML_TEMPLATE.format(
        service_name=bento_service.name, bentoml_version=BENTOML_VERSION,
        service_version=version, module_name=module_name, module_file=module_file,
        created_at=str(datetime.now()))
    with open(os.path.join(path, 'bentoml.yml'), 'w') as f:
        f.write(bentoml_yml_content)

    # Also write bentoml.yml to module base path to make it accessible
    # as package data after pip installed as a python package
    with open(os.path.join(module_base_path, 'bentoml.yml'), 'w') as f:
        f.write(bentoml_yml_content)

    if s3_url:
        upload_to_s3(s3_url, path)
        return s3_url
    else:
        return path