def set_requirements_txt(self, requirements_txt_path):
    """Append the module names listed in a requirements.txt file to the
    service's pip dependencies.

    :param requirements_txt_path: path to a pip requirements.txt file
    """
    requirements_txt_file = Path(requirements_txt_path)
    content = requirements_txt_file.read_text(encoding="utf-8")
    # BUG fix: split("\n") produced empty entries for blank lines and for the
    # trailing newline, polluting _pip_dependencies with "" modules.
    # splitlines() plus a blank filter keeps only real specifier lines.
    module_list = [line for line in content.splitlines() if line.strip()]
    self._pip_dependencies += module_list
def set_setup_sh(self, setup_sh_path_or_content):
    """Store the setup.sh payload as bytes.

    Accepts either a path to an existing setup.sh file (its bytes are read)
    or the raw script content as a string (encoded as UTF-8).
    """
    candidate = Path(setup_sh_path_or_content)
    if candidate.is_file():
        self._setup_sh = candidate.read_bytes()
    else:
        self._setup_sh = setup_sh_path_or_content.encode("utf-8")
def set_requirements_txt(self, requirements_txt_path_or_content):
    """Store the requirements.txt payload as bytes.

    Accepts either a path to an existing requirements.txt file (its bytes
    are read) or the raw file content as a string (encoded as UTF-8).
    """
    candidate = Path(requirements_txt_path_or_content)
    if candidate.is_file():
        self._requirements_txt = candidate.read_bytes()
    else:
        self._requirements_txt = requirements_txt_path_or_content.encode('utf-8')
def load_config():
    """Load BentoML configuration: defaults overlaid with the user's local
    config file when one exists.

    Ensures BENTOML_HOME exists, reads the packaged default config, then
    merges in the local config file if present.

    :return: a populated BentoMLConfigParser
    :raises BentoMLConfigException: when BENTOML_HOME cannot be created
    """
    global BENTOML_HOME  # pylint: disable=global-statement

    try:
        Path(BENTOML_HOME).mkdir(exist_ok=True)
    except OSError as err:
        # Chain the original OSError so the root cause is preserved in the
        # traceback (previously the context was implicit only).
        raise BentoMLConfigException(
            "Error creating bentoml home directory '{}': {}".format(
                BENTOML_HOME, err.strerror)) from err

    with open(DEFAULT_CONFIG_FILE, "rb") as f:
        DEFAULT_CONFIG = f.read().decode("utf-8")

    loaded_config = BentoMLConfigParser(
        default_config=parameterized_config(DEFAULT_CONFIG))

    local_config_file = get_local_config_file()
    if os.path.isfile(local_config_file):
        logger.info("Loading local BentoML config file: %s", local_config_file)
        with open(local_config_file, "rb") as f:
            loaded_config.read_string(
                parameterized_config(f.read().decode("utf-8")))
    else:
        logger.info(
            "No local BentoML config file found, using default configurations")

    return loaded_config
def generate_serverless_configuration_for_google(bento_service, apis,
                                                 output_path,
                                                 additional_options):
    """Update the template serverless.yml in output_path for a Google Cloud
    Functions deployment, applying optional region/stage overrides and one
    function entry per service API.
    """
    config_path = os.path.join(output_path, 'serverless.yml')
    yaml = YAML()
    with open(config_path, 'r') as f:
        content = f.read()
    serverless_config = yaml.load(content)

    serverless_config['provider']['project'] = bento_service.name
    if additional_options.get('region'):
        serverless_config['provider']['region'] = additional_options['region']
        # BUG fix: logger.info was previously called with a single tuple
        # argument, logging the tuple verbatim instead of a formatted message.
        logger.info('Using user defined Google region: %s',
                    additional_options['region'])
    if additional_options.get('stage'):
        serverless_config['provider']['stage'] = additional_options['stage']
        logger.info('Using user defined Google stage: %s',
                    additional_options['stage'])

    # One GCP function per service API, named after the API.
    # (Removed dead code: an `if api.name == 'first'` branch set an unused
    # local flag and had no effect.)
    serverless_config['functions'] = {}
    for api in apis:
        serverless_config['functions'][api.name] = {
            'handler': api.name,
            'events': [{'http': 'path'}],
        }

    yaml.dump(serverless_config, Path(config_path))
def build_template_dockerfile(bento_service, dst):
    """Create a base docker image template for the given bento_service.

    Lays out ``<dst>/<service_name>/<version>/`` containing the service's
    saved environment files and a base Dockerfile.

    :return: path of the created template directory
    :raises ValueError: when the target version directory already exists
    """
    version = _generate_new_version_str()
    # BUG fix: os.path.join was closed too early —
    # Path(os.path.join(dst), bento_service.name) — which only worked because
    # Path itself joins its positional arguments.
    Path(os.path.join(dst, bento_service.name)).mkdir(parents=True,
                                                      exist_ok=True)

    # Update path to subfolder in the form of 'base/service_name/version/'
    path = os.path.join(dst, bento_service.name, version)
    if os.path.exists(path):
        # Report the actual conflicting path (previously only dst was shown).
        raise ValueError("Template in Path: {path} already "
                         "exist.".format(path=path))
    os.mkdir(path)
    module_base_path = os.path.join(path, bento_service.name)
    os.mkdir(module_base_path)

    # write conda environment, requirement.txt
    bento_service.env.save(path)

    # write Dockerfile
    with open(os.path.join(path, 'Dockerfile'), 'w') as f:
        f.write(BENTO_SERVICE_DOCKERFILE_BASE_TEMPLATE)

    return path
def generate_serverless_configuration_for_aws_lambda(service_name, apis,
                                                     output_path, region,
                                                     stage):
    """Update the template serverless.yml in output_path with AWS Lambda
    service name, region, stage, per-API functions and packaging options.
    """
    yaml = YAML()
    config_path = os.path.join(output_path, "serverless.yml")
    with open(config_path, "r") as config_file:
        serverless_config = yaml.load(config_file.read())

    serverless_config["service"] = service_name
    serverless_config["provider"]["region"] = region
    logger.info("Using user AWS region: %s", region)
    serverless_config["provider"]["stage"] = stage
    logger.info("Using AWS stage: %s", stage)

    serverless_config["functions"] = generate_aws_handler_functions_config(
        apis)

    # Binary media types plus serverless-python-requirements packaging flags.
    serverless_config["custom"] = {
        "apigwBinary": ["image/jpg", "image/jpeg", "image/png"],
        "pythonRequirements": {
            "useDownloadCache": True,
            "useStaticCache": True,
            "dockerizePip": True,
            "slim": True,
            "strip": True,
            "zip": True,
        },
    }

    yaml.dump(serverless_config, Path(config_path))
def download_from_s3(s3_url, file_path):
    """Download every S3 object under s3_url into file_path.

    Directory placeholder keys (ending in '/') only create the local
    directory; other keys are downloaded preserving their relative paths.
    """
    parse_result = urlparse(s3_url)
    bucket = parse_result.netloc
    base_path = parse_result.path
    # urlparse keeps the URL's leading '/'; S3 keys do not start with one.
    if base_path.startswith('/'):
        base_path = base_path[1:]

    s3_client = boto3.client("s3")
    list_object_result = s3_client.list_objects(Bucket=bucket,
                                                Prefix=base_path)
    result_content = list_object_result["Contents"]

    for content in result_content:
        # BUG fix: reset per iteration. Previously a directory entry left
        # local_file_path either unbound (NameError on the first entry) or
        # stale from the prior iteration, re-downloading onto an old path.
        local_file_path = None
        if len(base_path) == len(content["Key"]):
            # single file
            local_file_path = os.path.join(file_path,
                                           base_path.split('/')[-1])
        else:
            relative_file_path = content["Key"][len(base_path) + 1:]  # noqa: E203
            if not relative_file_path:
                continue
            if relative_file_path.endswith('/'):
                # Directory placeholder key: just create the local directory.
                local_dir_path = os.path.join(file_path, relative_file_path)
                Path(os.path.dirname(local_dir_path)).mkdir(parents=True,
                                                            exist_ok=True)
                continue
            local_file_path = os.path.join(file_path, relative_file_path)

        if local_file_path:
            # Ensure the parent exists even when no directory placeholder key
            # preceded this object in the listing.
            Path(os.path.dirname(local_file_path)).mkdir(parents=True,
                                                         exist_ok=True)
            s3_client.download_file(
                Bucket=bucket, Key=content["Key"], Filename=local_file_path
            )
def generate(self):
    """Serialize this deployment's serverless configuration into a fresh
    temporary directory and record that directory in ``self.path``."""
    serverless_config = {
        "service": self.deployment_name,
        "provider": {
            "region": self.region,
            "stage": self.stage,
            "name": self.provider_name,
        },
        "functions": self.functions,
    }

    self.temp_directory.create()
    project_dir = self.temp_directory.path
    config_file = os.path.join(project_dir, "serverless.yml")
    YAML().dump(serverless_config, Path(config_file))
    self.path = project_dir
def generate_gcp_function_serverless_config(deployment_name, api_names,
                                            serverless_project_dir, region,
                                            stage):
    """Write a fresh serverless.yml for a Google Cloud Functions deployment,
    replacing any existing one, with one function entry per API name."""
    config_path = os.path.join(serverless_project_dir, "serverless.yml")
    # Start from a clean file: drop any previously generated config.
    if os.path.isfile(config_path):
        os.remove(config_path)

    functions = {
        name: {"handler": name, "events": [{"http": "path"}]}
        for name in api_names
    }
    serverless_config = {
        "service": deployment_name,
        "provider": {
            "region": region,
            "stage": stage,
            "name": "google",
            "project": deployment_name,
        },
        "functions": functions,
    }
    YAML().dump(serverless_config, Path(config_path))
def update_serverless_configuration_for_aws(bento_service, output_path):
    """Rewrite the serverless.yml at output_path for the first API of the
    bento service: package includes, a predict function, and removal of the
    template's placeholder 'hello' function."""
    yaml = YAML()
    api = bento_service.get_service_apis()[0]
    with open(output_path, 'r') as f:
        serverless_config = yaml.load(f.read())

    serverless_config['package'] = {
        'include': ['handler.py', bento_service.name + '/*',
                    'requirements.txt']
    }
    serverless_config['functions'][api.name] = {
        'handler': 'handler.predict',
        'events': [{'http': {'path': '/predict', 'method': 'post'}}],
    }
    # Drop the placeholder function from the serverless template.
    del serverless_config['functions']['hello']

    yaml.dump(serverless_config, Path(output_path))
def check_status(self):
    """Check the serverless deployment status for the bento service.

    Returns a ``(is_active, message)`` tuple: ``is_active`` is False when any
    line of the ``serverless info`` output contains "Serverless Error", True
    otherwise; ``message`` is the full CLI output joined with newlines.
    """
    apis = self.bento_service.get_service_apis()
    # Minimal config — just enough for `serverless info` to resolve the
    # deployed stack's service, provider and function names.
    config = {
        "service": self.bento_service.name,
        "provider": {
            "region": self.region,
            "stage": self.stage
        },
        "functions": {}
    }
    if self.platform == 'google-python':
        config['provider']['name'] = 'google'
        for api in apis:
            # One GCP function per service API, named after the API.
            config['functions'][api.name] = {
                'handler': api.name,
                'events': [{
                    'http': 'path'
                }]
            }
    elif self.platform == 'aws-lambda' or self.platform == 'aws-lambda-py2':
        config['provider']['name'] = 'aws'
        for api in apis:
            # AWS handlers live in handler.py; each API maps to POST /<name>.
            config['functions'][api.name] = {
                'handler': 'handler.' + api.name,
                'events': [{
                    'http': {
                        "path": '/' + api.name,
                        "method": 'post'
                    }
                }]
            }
    else:
        raise BentoMLException(
            'check serverless does not support platform %s at the moment' %
            self.platform)

    yaml = YAML()
    with TempDirectory() as tempdir:
        saved_path = os.path.join(tempdir, 'serverless.yml')
        yaml.dump(config, Path(saved_path))
        with subprocess.Popen(['serverless', 'info'], cwd=tempdir,
                              stdout=PIPE, stderr=PIPE) as proc:
            # We don't use the parse_response function here.
            # Instead of raising error, we will just return false
            content = proc.stdout.read().decode('utf-8')
            response = content.strip().split('\n')
            logger.debug('Serverless response: %s', '\n'.join(response))
            # Any line mentioning "Serverless Error" marks the deployment
            # as inactive.
            error = [s for s in response if 'Serverless Error' in s]
            if error:
                return False, '\n'.join(response)
            else:
                return True, '\n'.join(response)
def check_status(self):
    """Check the serverless deployment status for the bento service.

    Returns a ``(is_active, message)`` tuple: ``is_active`` is False when any
    line of the ``serverless info`` output contains "Serverless Error", True
    otherwise; ``message`` is the full CLI output joined with newlines.
    """
    apis = self.bento_service.get_service_apis()
    # Minimal config — just enough for `serverless info` to resolve the
    # deployed stack's service, provider and function names.
    config = {
        "service": self.bento_service.name,
        "provider": {
            "region": self.region,
            "stage": self.stage
        },
        "functions": {},
    }
    if self.platform == "google-python":
        config["provider"]["name"] = "google"
        for api in apis:
            # One GCP function per service API, named after the API.
            config["functions"][api.name] = {
                "handler": api.name,
                "events": [{
                    "http": "path"
                }],
            }
    elif self.platform == "aws-lambda" or self.platform == "aws-lambda-py2":
        config["provider"]["name"] = "aws"
        for api in apis:
            # AWS handlers live in handler.py; each API maps to POST /<name>.
            config["functions"][api.name] = {
                "handler": "handler." + api.name,
                "events": [{
                    "http": {
                        "path": "/" + api.name,
                        "method": "post"
                    }
                }],
            }
    else:
        raise BentoMLException(
            "check serverless does not support platform %s at the moment" %
            self.platform)

    yaml = YAML()
    with TempDirectory() as tempdir:
        saved_path = os.path.join(tempdir, "serverless.yml")
        yaml.dump(config, Path(saved_path))
        with subprocess.Popen(["serverless", "info"], cwd=tempdir,
                              stdout=PIPE, stderr=PIPE) as proc:
            # We don't use the parse_response function here.
            # Instead of raising error, we will just return false
            content = proc.stdout.read().decode("utf-8")
            response = content.strip().split("\n")
            logger.debug("Serverless response: %s", "\n".join(response))
            # Any line mentioning "Serverless Error" marks the deployment
            # as inactive.
            error = [s for s in response if "Serverless Error" in s]
            if error:
                return False, "\n".join(response)
            else:
                return True, "\n".join(response)
def configure_logging(logging_level=None):
    """Initialize BentoML logging from the 'logging' configuration section.

    When logging_level is None, the level is read from the LOGGING_LEVEL
    config entry (upper-cased). The base log directory is created if missing.
    """
    if logging_level is None:
        logging_level = config("logging").get("LOGGING_LEVEL").upper()

    log_dir = os.path.expanduser(config("logging").get("BASE_LOG_DIR"))
    Path(log_dir).mkdir(parents=True, exist_ok=True)

    logging.config.dictConfig(get_logging_config_dict(logging_level, log_dir))
def generate_aws_lambda_serverless_config(
        bento_python_version,
        deployment_name,
        api_names,
        serverless_project_dir,
        region,
        stage,
):
    """Write a fresh serverless.yml for an AWS Lambda deployment, replacing
    any existing one. The Lambda runtime is chosen from the bento's python
    version (python2.7 for <3.0.0, otherwise python3.7)."""
    config_path = os.path.join(serverless_project_dir, "serverless.yml")
    if os.path.isfile(config_path):
        os.remove(config_path)

    if version.parse(bento_python_version) < version.parse('3.0.0'):
        runtime = 'python2.7'
    else:
        runtime = 'python3.7'

    functions = {
        api_name: {
            "handler": "handler." + api_name,
            "events": [{"http": {"path": "/" + api_name, "method": "post"}}],
        }
        for api_name in api_names
    }
    # Mount the bundled_pip_dependencies directory into the packaging
    # container so the bundled tar.gz requirement can be installed inside it.
    docker_extra_args = [
        '-v',
        '{}/bundled_pip_dependencies:'
        '/var/task/bundled_pip_dependencies:z'.format(serverless_project_dir),
    ]

    serverless_config = {
        "service": deployment_name,
        "provider": {
            "region": region,
            "stage": stage,
            "name": 'aws',
            'runtime': runtime,
            "apiGateway": {"binaryMediaTypes": ['image/*']},
        },
        "functions": functions,
        "custom": {
            "pythonRequirements": {
                "useDownloadCache": True,
                "useStaticCache": True,
                "dockerizePip": True,
                "slim": True,
                "strip": True,
                "zip": True,
                "dockerRunCmdExtraArgs": docker_extra_args,
            },
        },
    }
    YAML().dump(serverless_config, Path(config_path))
def generate_serverless_configuration_for_aws(apis, output_path,
                                              additional_options):
    """Update the template serverless.yml in output_path for AWS Lambda.

    Region and stage fall back to 'us-west-2' / 'dev' when absent from
    additional_options. One function entry is created per service API, using
    the layered serverless-python-requirements packaging.
    """
    config_path = os.path.join(output_path, 'serverless.yml')
    yaml = YAML()
    with open(config_path, 'r') as f:
        content = f.read()
    serverless_config = yaml.load(content)

    if additional_options.get('region'):
        serverless_config['provider']['region'] = additional_options['region']
        # BUG fix: logger.info was previously called with a single tuple
        # argument, logging the tuple verbatim instead of a formatted message.
        logger.info('Using user defined AWS region: %s',
                    additional_options['region'])
    else:
        serverless_config['provider']['region'] = 'us-west-2'

    if additional_options.get('stage'):
        serverless_config['provider']['stage'] = additional_options['stage']
        logger.info('Using user defined AWS stage: %s',
                    additional_options['stage'])
    else:
        serverless_config['provider']['stage'] = 'dev'

    serverless_config['functions'] = {}
    for api in apis:
        serverless_config['functions'][api.name] = {
            'handler': 'handler.{name}'.format(name=api.name),
            'layers': ['{Ref: PythonRequirementsLambdaLayer}'],
            'events': [{
                'http': {
                    'path': '/{name}'.format(name=api.name),
                    'method': 'post'
                }
            }]
        }

    serverless_config['custom'] = {
        'apigwBinary': ['image/jpg', 'image/jpeg', 'image/png'],
        'pythonRequirements': {
            'useDownloadCache': True,
            'useStaticCache': True,
            'dockerizePip': True,
            'layer': True,
            'zip': True
        }
    }

    yaml.dump(serverless_config, Path(config_path))
def generate_serverless_configuration_for_aws(service_name, apis, output_path,
                                              region, stage):
    """Update the template serverless.yml in output_path with AWS Lambda
    service name, region, stage, per-API functions, packaging options and a
    package include list.
    """
    config_path = os.path.join(output_path, 'serverless.yml')
    yaml = YAML()
    with open(config_path, 'r') as f:
        content = f.read()
    serverless_config = yaml.load(content)

    serverless_config['service'] = service_name
    serverless_config['provider']['region'] = region
    logger.info('Using user AWS region: %s', region)
    serverless_config['provider']['stage'] = stage
    logger.info('Using AWS stage: %s', stage)

    serverless_config['functions'] = {}
    for api in apis:
        serverless_config['functions'][api.name] = {
            'handler': 'handler.{name}'.format(name=api.name),
            'events': [{
                'http': {
                    'path': '/{name}'.format(name=api.name),
                    'method': 'post'
                }
            }]
        }

    serverless_config['custom'] = {
        'apigwBinary': ['image/jpg', 'image/jpeg', 'image/png'],
        'pythonRequirements': {
            'useDownloadCache': True,
            'useStaticCache': True,
            'dockerizePip': True,
            'slim': True,
            'strip': True,
            'zip': True
        }
    }

    # BUG fix: package_config was built but never written into the config,
    # so the dumped serverless.yml silently lacked the package include list.
    serverless_config['package'] = {
        'include': [
            'handler.py',
            service_name + '/**'
        ]
    }

    yaml.dump(serverless_config, Path(config_path))
def generate_serverless_configuration_for_aws_lambda(service_name, apis,
                                                     output_path, region,
                                                     stage):
    """Update the template serverless.yml in output_path with AWS Lambda
    settings, including a docker mount so bundled pip dependencies install
    inside the packaging container."""
    yaml = YAML()
    config_path = os.path.join(output_path, "serverless.yml")
    with open(config_path, "r") as config_file:
        serverless_config = yaml.load(config_file.read())

    serverless_config["service"] = service_name
    serverless_config["provider"]["region"] = region
    logger.info("Using user AWS region: %s", region)
    serverless_config["provider"]["stage"] = stage
    logger.info("Using AWS stage: %s", stage)

    serverless_config["functions"] = generate_aws_handler_functions_config(
        apis)

    # The bundled_pip_dependencies directory is mounted into the packaging
    # container so the bundled tar.gz requirement can be installed from
    # inside it.
    mount_arg = ('{}/bundled_pip_dependencies:'
                 '/var/task/bundled_pip_dependencies:z'.format(output_path))
    serverless_config["custom"] = {
        "apigwBinary": ["image/jpg", "image/jpeg", "image/png"],
        "pythonRequirements": {
            "useDownloadCache": True,
            "useStaticCache": True,
            "dockerizePip": True,
            "slim": True,
            "strip": True,
            "zip": True,
            "dockerRunCmdExtraArgs": ['-v', mount_arg],
        },
    }

    yaml.dump(serverless_config, Path(config_path))
def generate_serverless_configuration_for_aws(service_name, apis, output_path,
                                              region, stage):
    """Update the template serverless.yml in output_path with AWS Lambda
    service name, region, stage, per-API POST functions and packaging
    options."""
    yaml = YAML()
    config_path = os.path.join(output_path, "serverless.yml")
    with open(config_path, "r") as config_file:
        serverless_config = yaml.load(config_file.read())

    serverless_config["service"] = service_name
    serverless_config["provider"]["region"] = region
    logger.info("Using user AWS region: %s", region)
    serverless_config["provider"]["stage"] = stage
    logger.info("Using AWS stage: %s", stage)

    # Each API becomes a handler.<name> function served at POST /<name>.
    serverless_config["functions"] = {
        api.name: {
            "handler": "handler.{name}".format(name=api.name),
            "events": [{
                "http": {
                    "path": "/{name}".format(name=api.name),
                    "method": "post"
                }
            }],
        }
        for api in apis
    }

    serverless_config["custom"] = {
        "apigwBinary": ["image/jpg", "image/jpeg", "image/png"],
        "pythonRequirements": {
            "useDownloadCache": True,
            "useStaticCache": True,
            "dockerizePip": True,
            "slim": True,
            "strip": True,
            "zip": True,
        },
    }

    yaml.dump(serverless_config, Path(config_path))
def update_serverless_configuration_for_google(bento_service, output_path,
                                               extra_args):
    """Rewrite the serverless.yml at output_path for the first API of the
    bento service, applying optional region/stage overrides and removing the
    template's placeholder 'first' function."""
    yaml = YAML()
    api = bento_service.get_service_apis()[0]
    with open(output_path, 'r') as f:
        serverless_config = yaml.load(f.read())

    if extra_args.region:
        serverless_config['provider']['region'] = extra_args.region
    if extra_args.stage:
        serverless_config['provider']['stage'] = extra_args.stage
    serverless_config['provider']['project'] = bento_service.name

    serverless_config['functions'][api.name] = {
        'handler': api.name,
        'events': [{'http': 'path'}],
    }
    # Drop the placeholder function from the serverless template.
    del serverless_config['functions']['first']

    yaml.dump(serverless_config, Path(output_path))
def generate_serverless_bundle(bento_service, platform, archive_path,
                               additional_options):
    """Generate a serverless project bundle for the given platform.

    Scaffolds a templated serverless project, applies platform-specific
    bundling, then copies the service archive into the project.

    :return: real path of the generated bundle directory
    :raises BentoMLException: for unsupported platforms
    """
    check_serverless_compatiable_version()
    provider = SERVERLESS_PROVIDER[platform]
    output_path = generate_bentoml_deployment_snapshot_path(
        bento_service.name, platform)
    Path(output_path).mkdir(parents=True, exist_ok=False)

    # Calling serverless command to generate templated project
    subprocess.call([
        'serverless', 'create', '--template', provider, '--name',
        bento_service.name
    ], cwd=output_path)

    if platform == 'google-python':
        create_gcp_function_bundle(bento_service, output_path,
                                   additional_options)
    elif platform == 'aws-lambda' or platform == 'aws-lambda-py2':
        # Installing two additional plugins to make it work for AWS Lambda:
        # serverless-python-requirements packages required python modules and
        # automatically compresses them into a layer.
        subprocess.call([
            'serverless', 'plugin', 'install', '-n',
            'serverless-python-requirements'
        ], cwd=output_path)
        subprocess.call([
            'serverless', 'plugin', 'install', '-n', 'serverless-apigw-binary'
        ], cwd=output_path)
        create_aws_lambda_bundle(bento_service, output_path,
                                 additional_options)
    else:
        # BUG fix: previously raised with a (format-string, value) tuple,
        # producing an unformatted exception message.
        raise BentoMLException(
            '{provider} is not supported in current version of '
            'BentoML'.format(provider=provider))

    shutil.copy(os.path.join(archive_path, 'requirements.txt'), output_path)
    model_service_archive_path = os.path.join(output_path, bento_service.name)
    shutil.copytree(archive_path, model_service_archive_path)

    return os.path.realpath(output_path)
def _generate_bundle(self):
    """Create the serverless project directory for this deployment and
    return its real path."""
    output_path = generate_bentoml_deployment_snapshot_path(
        self.bento_service.name, self.bento_service.version, self.platform)
    Path(output_path).mkdir(parents=True, exist_ok=False)

    # Scaffold a templated serverless project.
    call_serverless_command(
        [
            "serverless", "create", "--template", self.provider,
            "--name", self.bento_service.name,
        ],
        output_path,
    )

    if self.platform == "google-python":
        create_gcp_function_bundle(self.bento_service, output_path,
                                   self.region, self.stage)
    elif self.platform in ("aws-lambda", "aws-lambda-py2"):
        # Two extra plugins are needed for AWS Lambda:
        # serverless-python-requirements packages required python modules and
        # automatically compresses them into a layer.
        install_serverless_plugin("serverless-python-requirements",
                                  output_path)
        install_serverless_plugin("serverless-apigw-binary", output_path)
        create_aws_lambda_bundle(self.bento_service, output_path, self.region,
                                 self.stage)
    else:
        raise BentoMLException(
            "%s is not supported in current version of BentoML" %
            self.provider)

    shutil.copy(os.path.join(self.archive_path, "requirements.txt"),
                output_path)
    archive_target = os.path.join(output_path, self.bento_service.name)
    shutil.copytree(self.archive_path, archive_target)

    return os.path.realpath(output_path)
def _create_temporary_yaml_config(self):
    """Write a minimal serverless.yml for this deployment into a temporary
    directory and return that directory's path.

    NOTE(review): the path is returned from inside the ``TempDirectory``
    context manager; if TempDirectory removes the directory on __exit__,
    callers receive a path that no longer exists — confirm TempDirectory's
    cleanup semantics (the sibling ``generate`` method uses
    ``temp_directory.create()`` without a context manager instead).
    """
    apis = self.bento_service.get_service_apis()
    # Minimal config — just enough for serverless CLI commands to resolve
    # the service, provider and function names.
    serverless_config = {
        "service": self.bento_service.name,
        "provider": {
            "region": self.region,
            "stage": self.stage
        },
        "functions": {},
    }
    if self.platform == "google-python":
        serverless_config["provider"]["name"] = "google"
        for api in apis:
            # One GCP function per service API, named after the API.
            serverless_config["functions"][api.name] = {
                "handler": api.name,
                "events": [{
                    "http": "path"
                }],
            }
    elif self.platform == "aws-lambda" or self.platform == "aws-lambda-py2":
        serverless_config["provider"]["name"] = "aws"
        for api in apis:
            # AWS handlers live in handler.py; each API maps to POST /<name>.
            serverless_config["functions"][api.name] = {
                "handler": "handler." + api.name,
                "events": [{
                    "http": {
                        "path": "/" + api.name,
                        "method": "post"
                    }
                }],
            }
    else:
        raise BentoMLException(
            "check serverless does not support platform %s at the moment" %
            self.platform)

    yaml = YAML()
    with TempDirectory() as tempdir:
        saved_path = os.path.join(tempdir, "serverless.yml")
        yaml.dump(serverless_config, Path(saved_path))
        return tempdir
def download_from_s3(s3_url, file_path):
    """Download every S3 object under s3_url into file_path, preserving each
    object's path relative to the s3_url prefix."""
    parse_result = urlparse(s3_url)
    bucket = parse_result.netloc
    base_path = parse_result.path
    # BUG fix: urlparse keeps the URL's leading '/', but S3 keys do not start
    # with one, so an unstripped Prefix would not match the intended objects.
    # (The sibling download_from_s3 implementation already strips it.)
    if base_path.startswith('/'):
        base_path = base_path[1:]

    s3_client = boto3.client("s3")
    list_object_result = s3_client.list_objects(Bucket=bucket,
                                                Prefix=base_path)
    result_content = list_object_result["Contents"]

    for content in result_content:
        # Strip "<base_path>/" from the key to get the local relative path.
        relative_file_path = content["Key"][len(base_path) + 1:]  # noqa: E203
        local_file_path = os.path.join(file_path, relative_file_path)
        Path(os.path.dirname(local_file_path)).mkdir(parents=True,
                                                     exist_ok=True)
        s3_client.download_file(Bucket=bucket, Key=content["Key"],
                                Filename=local_file_path)
def run(ctx, api_name, archive_path=archive_path, with_conda=False):
    """Run a BentoService API from the CLI.

    With ``with_conda`` the API is executed inside the archive's conda
    environment (created/updated on the fly); otherwise it is invoked
    directly in the current process.
    """
    if with_conda:
        config = load_bentoml_config(archive_path)
        metadata = config['metadata']
        env_name = metadata['service_name'] + '_' + metadata[
            'service_version']

        yaml = YAML()
        yaml.default_flow_style = False
        # delete=False: the file must survive until the spawned shell below
        # has read it.
        tmpf = tempfile.NamedTemporaryFile(delete=False)
        env_path = tmpf.name
        try:
            yaml.dump(config['env']['conda_env'], Path(env_path))

            pip_req = os.path.join(archive_path, 'requirements.txt')

            subprocess.call(
                'command -v conda >/dev/null 2>&1 || {{ echo >&2 "--with-conda '
                'parameter requires conda but it\'s not installed."; exit 1; }} && '
                'conda env update -n {env_name} -f {env_file} && '
                'conda init bash && '
                'eval "$(conda shell.bash hook)" && '
                'conda activate {env_name} && '
                '{{ [ -f {pip_req} ] && pip install -r {pip_req} || echo "no pip '
                'dependencies."; }} &&'
                'bentoml {api_name} {archive_path} {args}'.format(
                    env_name=env_name,
                    env_file=env_path,
                    archive_path=archive_path,
                    api_name=api_name,
                    args=' '.join(map(escape_shell_params, ctx.args)),
                    pip_req=pip_req,
                ),
                shell=True,
            )
        finally:
            # BUG fix: the NamedTemporaryFile(delete=False) was never closed
            # or removed, leaking one temp file per --with-conda invocation.
            tmpf.close()
            os.remove(env_path)
        return

    track_cli('run')

    api = load_service_api(archive_path, api_name)
    api.handle_cli(ctx.args)
def generate_serverless_configuration_for_google(bento_service, apis,
                                                 output_path, region, stage):
    """Update the template serverless.yml in output_path for a Google Cloud
    Functions deployment: project, region, stage and one function per API."""
    yaml = YAML()
    config_path = os.path.join(output_path, 'serverless.yml')
    with open(config_path, 'r') as f:
        serverless_config = yaml.load(f.read())

    provider = serverless_config['provider']
    provider['project'] = bento_service.name
    provider['region'] = region
    logger.info('Using user defined Google region: %s', region)
    provider['stage'] = stage
    logger.info('Using user defined Google stage: %s', stage)

    # One GCP function per service API, named after the API.
    serverless_config['functions'] = {
        api.name: {'handler': api.name, 'events': [{'http': 'path'}]}
        for api in apis
    }

    yaml.dump(serverless_config, Path(config_path))
def generate_serverless_configuration_for_gcp_function(
    service_name, apis, output_path, region, stage
):
    """Update the template serverless.yml in output_path for a Google Cloud
    Functions deployment: service, project, region, stage and per-API
    functions."""
    yaml = YAML()
    config_path = os.path.join(output_path, "serverless.yml")
    with open(config_path, "r") as config_file:
        serverless_config = yaml.load(config_file.read())

    serverless_config["service"] = service_name
    provider = serverless_config["provider"]
    provider["project"] = service_name
    provider["region"] = region
    logger.info("Using user defined Google region: %s", region)
    provider["stage"] = stage
    logger.info("Using user defined Google stage: %s", stage)

    serverless_config["functions"] = generate_gcp_handler_functions_config(
        apis)

    yaml.dump(serverless_config, Path(config_path))
def delete(self):
    """Tear down the serverless deployment for this bento service.

    :return: True when removal appears successful (always True for
        google-python — see TODO), False otherwise
    :raises BentoMLException: when there is no active deployment or the
        platform is unsupported
    """
    is_active, _ = self.check_status()
    if not is_active:
        raise BentoMLException("No active deployment for service %s" %
                               self.bento_service.name)

    if self.platform == "google-python":
        provider_name = "google"
    elif self.platform == "aws-lambda" or self.platform == "aws-lambda-py2":
        provider_name = "aws"
    else:
        # BUG fix: previously fell through with provider_name unbound,
        # crashing with NameError for unsupported platforms.
        raise BentoMLException(
            "delete does not support platform %s at the moment" %
            self.platform)

    config = {
        "service": self.bento_service.name,
        "provider": {
            "name": provider_name,
            "region": self.region,
            "stage": self.stage,
        },
    }
    yaml = YAML()
    with TempDirectory() as tempdir:
        saved_path = os.path.join(tempdir, "serverless.yml")
        yaml.dump(config, Path(saved_path))

        with subprocess.Popen(["serverless", "remove"], cwd=tempdir,
                              stdout=PIPE, stderr=PIPE) as proc:
            response = parse_serverless_response(
                proc.stdout.read().decode("utf-8"))
            logger.debug("Serverless response: %s", "\n".join(response))
            if self.platform == "google-python":
                # TODO: Add check for Google's response
                return True
            # NOTE(review): this is an exact-line membership test on the
            # response list — confirm parse_serverless_response yields the
            # unmodified CLI lines.
            if "Serverless: Stack removal finished..." in response:
                return True
            return False
def delete(self):
    """Tear down the serverless deployment for this bento service.

    :return: True when removal appears successful (always True for
        google-python — see TODO), False otherwise
    :raises BentoMLException: when there is no active deployment or the
        platform is unsupported
    """
    is_active, _ = self.check_status()
    if not is_active:
        raise BentoMLException('No active deployment for service %s' %
                               self.bento_service.name)

    if self.platform == 'google-python':
        provider_name = 'google'
    elif self.platform == 'aws-lambda' or self.platform == 'aws-lambda-py2':
        provider_name = 'aws'
    else:
        # BUG fix: previously fell through with provider_name unbound,
        # crashing with NameError for unsupported platforms.
        raise BentoMLException(
            'delete does not support platform %s at the moment' %
            self.platform)

    config = {
        "service": self.bento_service.name,
        "provider": {
            "name": provider_name,
            "region": self.region,
            "stage": self.stage
        }
    }
    yaml = YAML()
    with TempDirectory() as tempdir:
        saved_path = os.path.join(tempdir, 'serverless.yml')
        yaml.dump(config, Path(saved_path))

        with subprocess.Popen(['serverless', 'remove'], cwd=tempdir,
                              stdout=PIPE, stderr=PIPE) as proc:
            response = parse_serverless_response(
                proc.stdout.read().decode('utf-8'))
            logger.debug('Serverless response: %s', '\n'.join(response))
            if self.platform == 'google-python':
                # TODO: Add check for Google's response
                return True
            # NOTE(review): this is an exact-line membership test on the
            # response list — confirm parse_serverless_response yields the
            # unmodified CLI lines.
            if 'Serverless: Stack removal finished...' in response:
                return True
            return False
def add(self, bento_name, bento_version):
    """Reserve and create the storage directory for a new bento archive.

    The archive lives under ``<base_path>/<bento_name>/<bento_version>/``,
    e.g. with base_path '/tmp/my_bento_archive/' the bento is saved at
    '/tmp/my_bento_archive/service_name/version/'.

    :return: a BentoUri pointing at the created directory
    :raises BentoMLRepositoryException: when the version already exists
    """
    target_dir = os.path.join(self.base_path, bento_name, bento_version)

    # Ensure parent directory exists.
    # BUG fix: os.path.join was closed too early —
    # Path(os.path.join(self.base_path), bento_name) — which only worked
    # because Path itself joins its positional arguments.
    Path(os.path.join(self.base_path, bento_name)).mkdir(parents=True,
                                                         exist_ok=True)

    # Raise if target bento version already exists in storage.
    if os.path.exists(target_dir):
        raise BentoMLRepositoryException(
            "Existing Bento {name}:{version} found in archive: {target_dir}"
            .format(name=bento_name, version=bento_version,
                    target_dir=target_dir))

    # Create target directory for upload.
    os.mkdir(target_dir)

    return BentoUri(type=self.uri_type, uri=target_dir)