def test_yatai_server_with_postgres_and_local_storage(temporary_docker_postgres_url):
    with start_yatai_server(temporary_docker_postgres_url) as yatai_service_url:
        logger.info(f'Setting config yatai_service.url to: {yatai_service_url}')
        with modified_environ(BENTOML__YATAI_SERVICE__URL=yatai_service_url):
            logger.info('Saving bento service')
            svc = BentoServiceForYataiTest()
            svc.save()
            bento_tag = f'{svc.name}:{svc.version}'
            logger.info('BentoService saved')

            logger.info('Display bentoservice info')
            get_svc_result = get_bento_service(svc.name, svc.version)
            logger.info(get_svc_result)
            assert (
                get_svc_result.bento.uri.type == BentoUri.LOCAL
            ), 'BentoService storage type mismatched, expect LOCAL'

            logger.info('Validate BentoService prediction result')
            run_result = run_bento_service_prediction(bento_tag, '[]')
            logger.info(run_result)
            assert 'cat' in run_result, 'Unexpected BentoService prediction result'

            logger.info('Delete BentoService for testing')
            delete_svc_result = delete_bento(bento_tag)
            logger.info(delete_svc_result)
            assert delete_svc_result is None, 'Unexpected delete BentoService message.'
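# These tests rely on a `modified_environ` helper imported from the shared test
# utilities to point the BentoML client at the temporary YataiService. For
# reference, a minimal equivalent (an assumption, not the actual helper) would
# look like this:
import os
from contextlib import contextmanager


@contextmanager
def _modified_environ_sketch(**overrides):
    """Temporarily set environment variables, restoring previous values on exit."""
    previous = {key: os.environ.get(key) for key in overrides}
    os.environ.update({key: str(value) for key, value in overrides.items()})
    try:
        yield
    finally:
        for key, value in previous.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value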
def test_yatai_server_with_postgres_and_s3(temporary_docker_postgres_url):
    # Note: use a pre-existing bucket rather than a newly created one, because the
    # bucket's global DNS needs time to propagate.
    # https://github.com/boto/boto3/issues/1982#issuecomment-511947643
    s3_bucket_name = 's3://bentoml-e2e-test-repo/'

    with start_yatai_server(
        db_url=temporary_docker_postgres_url, repo_base_url=s3_bucket_name
    ) as yatai_service_url:
        logger.info(f'Setting config yatai_service.url to: {yatai_service_url}')
        with modified_environ(BENTOML__YATAI_SERVICE__URL=yatai_service_url):
            logger.info('Saving bento service')
            svc = BentoServiceForYataiTest()
            svc.save()
            bento_tag = f'{svc.name}:{svc.version}'
            logger.info('BentoService saved')

            logger.info('Display bentoservice info')
            get_svc_result = get_bento_service(svc.name, svc.version)
            logger.info(get_svc_result)
            assert (
                get_svc_result.bento.uri.type == BentoUri.S3
            ), 'BentoService storage type mismatched, expect S3'

            logger.info('Validate BentoService prediction result')
            run_result = run_bento_service_prediction(bento_tag, '[]')
            logger.info(run_result)
            assert 'cat' in run_result, 'Unexpected BentoService prediction result'

            logger.info('Delete BentoService for testing')
            delete_svc_result = delete_bento(bento_tag)
            logger.info(delete_svc_result)
            assert delete_svc_result is None, 'Unexpected delete BentoService message.'
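# The `temporary_docker_postgres_url` fixture is provided by the suite's
# conftest. A rough sketch of what it is assumed to do is shown below under a
# different name so it does not shadow the real fixture; the container name,
# credentials, and fixed readiness wait are illustrative only.
import subprocess
import time
import uuid

import pytest


@pytest.fixture(scope='module')
def temporary_docker_postgres_url_sketch():
    container_name = f'e2e-yatai-postgres-{uuid.uuid4().hex[:8]}'
    subprocess.check_call([
        'docker', 'run', '--rm', '-d',
        '--name', container_name,
        '-e', 'POSTGRES_PASSWORD=postgres',
        '-p', '5432:5432',
        'postgres',
    ])
    try:
        time.sleep(10)  # crude wait; a real fixture would poll for readiness
        yield 'postgresql://postgres:postgres@localhost:5432/postgres'
    finally:
        subprocess.check_call(['docker', 'stop', container_name])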
def send_test_data_to_endpoint(endpoint_name, sample_data=None, region="us-west-2"):
    logger.info(f'Test deployment with sample request for {endpoint_name}')
    sample_data = sample_data or '"[0]"'
    test_command = [
        'aws',
        'sagemaker-runtime',
        'invoke-endpoint',
        '--endpoint-name',
        endpoint_name,
        '--content-type',
        '"application/json"',
        '--body',
        sample_data,
        # `invoke-endpoint` requires an outfile argument for the response body;
        # `>(cat)` streams that body to stdout via process substitution, while
        # `1>/dev/null` discards the CLI's own metadata output before piping
        # the body through `jq`.
        '>(cat) 1>/dev/null',
        '|',
        'jq .',
    ]
    logger.info('Testing command: %s', ' '.join(test_command))
    with modified_environ(AWS_REGION=region):
        result = subprocess.run(
            ' '.join(test_command),
            capture_output=True,
            shell=True,
            check=True,
            executable='/bin/bash',
        )
        logger.info(result)
        if result.stderr.decode('utf-8'):
            return False, None
        else:
            return True, result.stdout.decode('utf-8')
def send_test_data_to_endpoint(endpoint_name, sample_data=None, region="us-west-2"):
    # boto3-based variant of the helper above: invokes the endpoint directly
    # instead of shelling out to the AWS CLI.
    logger.info(f'Test deployment with sample request for {endpoint_name}')
    sample_data = sample_data or '"[0]"'
    with modified_environ(AWS_REGION=region):
        # Create the client inside the env override so the region takes effect;
        # boto3 resolves the region when the client is constructed.
        client = boto3.client('sagemaker-runtime')
        result = client.invoke_endpoint(
            EndpointName=endpoint_name.strip(),
            ContentType='application/json',
            Body=sample_data,
        )
    logger.info(result)
    if result.get('ResponseMetadata', None) is None:
        return False, None
    if result['ResponseMetadata']['HTTPStatusCode'] != 200:
        return False, None
    body = result['Body'].read()
    return True, body.decode('utf-8')
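# Example usage of the boto3-based helper above. The endpoint name here is
# hypothetical; a deployment test would pass the endpoint created for the
# deployment under test. Prefixed with `_` so pytest does not collect it.
def _example_invoke_sagemaker_endpoint():
    success, prediction = send_test_data_to_endpoint(
        endpoint_name='dev-my-bento-service',  # hypothetical endpoint name
        sample_data='"[0]"',
        region='us-west-2',
    )
    assert success, 'Failed to invoke the SageMaker endpoint'
    logger.info('Endpoint returned: %s', prediction)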