Example #1
def test_aws_lambda_deployment(iris_clf_service):
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'tests-lambda-e2e-{random_hash}'

    create_deployment_command = [
        'bentoml',
        'lambda',
        'deploy',
        deployment_name,
        '-b',
        iris_clf_service,
        '--region',
        'us-west-2',
        '--verbose',
    ]
    try:
        deployment_success, deployment_endpoint = run_lambda_create_or_update_command(
            create_deployment_command
        )
        assert deployment_success, "AWS Lambda deployment creation should succeed"
        assert deployment_endpoint, "AWS Lambda deployment should have an endpoint"

        iris = datasets.load_iris()
        sample_data = iris.data[0:1]
        status_code, content = send_test_data_to_endpoint(
            deployment_endpoint, json.dumps(sample_data.tolist())
        )
        assert status_code == 200, "prediction request should succeed"
        assert content == '[0]', "prediction result mismatch"
    finally:
        delete_deployment('lambda', deployment_name)
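These examples call several helper functions that are not shown on this page. Below is a minimal sketch of what `run_lambda_create_or_update_command` could look like, assuming it runs the CLI command in a subprocess and scrapes the endpoint URL from the output; the name comes from the examples, but the parsing logic here is a guess, not the real test helper.

import re
import subprocess


def run_lambda_create_or_update_command(command):
    # Sketch only: run the `bentoml lambda deploy/update` command and return
    # (deployment_success, deployment_endpoint).
    proc = subprocess.run(command, capture_output=True, text=True)
    success = proc.returncode == 0
    # Assumed parsing: take the first https URL printed by the CLI as the endpoint.
    match = re.search(r'https://\S+', proc.stdout)
    endpoint = match.group(0).rstrip('",') if match else None
    return success, endpoint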
Example #2
def test_sagemaker_update_deployment(basic_bentoservice_v1,
                                     basic_bentoservice_v2):
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'tests-sagemaker-update-e2e-{random_hash}'
    region = 'us-west-2'

    create_deployment_command = [
        'bentoml',
        'sagemaker',
        'deploy',
        deployment_name,
        '-b',
        basic_bentoservice_v1,
        '--api-name',
        'predict',
        '--region',
        region,
        '--verbose',
    ]
    try:
        deployment_success, endpoint_name = run_sagemaker_create_or_update_command(
            create_deployment_command)
        assert deployment_success, 'Sagemaker deployment was unsuccessful'
        assert endpoint_name, 'Sagemaker deployment endpoint name is missing'

        request_success, prediction_result = send_test_data_to_endpoint(
            endpoint_name)
        assert request_success, 'Failed to make successful Sagemaker request'
        assert (prediction_result.strip() == '"cat"'
                ), 'Sagemaker prediction result mismatch'

        update_bento_version_deployment_command = [
            'bentoml',
            'sagemaker',
            'update',
            deployment_name,
            '-b',
            basic_bentoservice_v2,
            '--wait',
            '--verbose',
        ]
        (
            updated_deployment_success,
            endpoint_name,
        ) = run_sagemaker_create_or_update_command(
            update_bento_version_deployment_command)
        assert (updated_deployment_success
                ), 'Sagemaker update deployment was unsuccessful'
        assert endpoint_name, 'Sagemaker deployment endpoint name is missing'

        request_success, prediction_result = send_test_data_to_endpoint(
            endpoint_name)
        assert request_success, 'Failed to make successful Sagemaker request'
        assert (prediction_result.strip() == '"dog"'
                ), 'Sagemaker prediction result mismatch'
    finally:
        delete_deployment('sagemaker', deployment_name)
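`send_test_data_to_endpoint` for the SageMaker examples is also not shown. A plausible sketch using boto3's `sagemaker-runtime` client follows; the default payload, the content type, and the (success, result) return convention are assumptions for illustration only.

import boto3
from botocore.exceptions import ClientError


def send_test_data_to_endpoint(endpoint_name, sample_data='"test"', region='us-west-2'):
    # Sketch only: invoke the SageMaker endpoint with a JSON payload and
    # return (request_success, decoded_prediction_result).
    client = boto3.client('sagemaker-runtime', region_name=region)
    try:
        response = client.invoke_endpoint(
            EndpointName=endpoint_name,
            ContentType='application/json',
            Body=sample_data,
        )
    except ClientError:
        return False, None
    return True, response['Body'].read().decode('utf-8')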
Example #3
def test_azure_function_deployment(iris_clf_service):
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'test-azures-{random_hash}'
    command = f"""\
bentoml azure-functions deploy {deployment_name} -b {iris_clf_service} \
--location westus --max-burst 2 --function-auth-level anonymous --debug\
""".split(
        ' '
    )
    try:
        logger.info(f'Deploying {deployment_name} to Azure function')
        with subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        ) as proc:
            stdout = proc.stdout.read().decode('utf-8')
        logger.info(stdout)
        assert proc.returncode == 0, 'Failed to create Azure Functions deployment'
        deploy_command_stdout_list = stdout.split('\n')
        endpoint = None
        for index, message in enumerate(deploy_command_stdout_list):
            if '"hostNames": [' in message:
                endpoint = (
                    deploy_command_stdout_list[index + 1].strip().replace('"', '')
                )
        # Azure can take a long time to pull the docker image; wait several
        # minutes before the function app is ready to serve requests.
        logger.info('Sleeping 500s to wait for Azure to download the docker image')
        time.sleep(500)
        start_time = time.time()
        while (time.time() - start_time) < 400:
            logger.info(f'Making request to endpoint {endpoint}')
            request_result = requests.post(
                f'https://{endpoint}/predict',
                data='[[5, 4, 3, 2]]',
                headers={'Content-Type': 'application/json'},
            )
            logger.info(
                f'Request result {request_result.status_code} '
                f'{request_result.content.decode("utf-8")}'
            )
            if request_result.status_code == 503 or request_result.status_code == 502:
                time.sleep(100)
            else:
                break
        assert (
            request_result.status_code == 200
        ), 'Azure function deployment prediction request failed'
        assert (
            request_result.content.decode('utf-8') == '[1]'
        ), 'Azure function deployment prediction result mismatch'
    finally:
        delete_deployment('azure-functions', deployment_name)
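Every example cleans up in a `finally` block with `delete_deployment`. A rough sketch is below; it assumes the helper shells out to the matching `bentoml <platform> delete` subcommand with `--force` and an optional `--namespace`, which is an educated guess rather than the actual implementation.

import logging
import subprocess

logger = logging.getLogger(__name__)


def delete_deployment(platform, deployment_name, namespace=None):
    # Sketch only: assumes `bentoml <platform> delete <name> --force` tears
    # down the deployment; failures are logged instead of raised so cleanup
    # never masks the original test failure.
    command = ['bentoml', platform, 'delete', deployment_name, '--force']
    if namespace:
        command.extend(['--namespace', namespace])
    proc = subprocess.run(command, capture_output=True, text=True)
    if proc.returncode != 0:
        logger.warning('Failed to delete %s deployment %s: %s',
                       platform, deployment_name, proc.stderr)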
Example #4
def test_aws_lambda_update_deployment(basic_bentoservice_v1,
                                      basic_bentoservice_v2):
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'tests-lambda-update-{random_hash}'

    create_deployment_command = [
        'bentoml',
        'lambda',
        'deploy',
        deployment_name,
        '-b',
        basic_bentoservice_v1,
        '--region',
        'us-west-2',
        '--verbose',
    ]
    try:
        deployment_success, deployment_endpoint = run_lambda_create_or_update_command(
            create_deployment_command)
        assert deployment_success, "AWS Lambda deployment creation should succeed"
        assert deployment_endpoint, "AWS Lambda deployment should have an endpoint"
        status_code, content = send_test_data_to_endpoint(deployment_endpoint)
        assert status_code == 200, "prediction request should succeed"
        assert content == '"cat"', "prediction result mismatch"

        update_deployment_command = [
            'bentoml',
            'lambda',
            'update',
            deployment_name,
            '-b',
            basic_bentoservice_v2,
            '--verbose',
        ]

        (
            update_deployment_success,
            update_deployment_endpoint,
        ) = run_lambda_create_or_update_command(update_deployment_command)
        assert (update_deployment_success
                ), "AWS Lambda deployment update should succeed"
        assert update_deployment_endpoint, "AWS Lambda deployment should have an endpoint"

        # The deployment URL is expected to stay the same after the update,
        # so the original endpoint is queried again here.
        status_code, content = send_test_data_to_endpoint(deployment_endpoint)
        assert status_code == 200, "Updated prediction request should succeed"
        assert content == '"dog"', "Updated prediction result mismatch"
    finally:
        delete_deployment('lambda', deployment_name)
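For the Lambda examples, `send_test_data_to_endpoint` returns an HTTP status code and a response body rather than the boto3-style pair used for SageMaker, which suggests a plain `requests.post` against the deployment URL. The sketch below follows that reading; the default payload and headers are assumptions.

import requests


def send_test_data_to_endpoint(deployment_endpoint, sample_data='"test"'):
    # Sketch only: POST JSON test data to the Lambda deployment URL and
    # return (status_code, response_text) as the examples above expect.
    response = requests.post(
        deployment_endpoint,
        data=sample_data,
        headers={'Content-Type': 'application/json'},
    )
    return response.status_code, response.text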
Example #5
def test_aws_ec2_deployment(iris_clf_service):
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'tests-ec2-e2e-{random_hash}'
    deployment_namespace = "dev"
    deployment_region = get_default_aws_region()

    create_deployment_command = [
        'bentoml',
        'ec2',
        'deploy',
        deployment_name,
        '-b',
        iris_clf_service,
        '--namespace',
        deployment_namespace,
        '--region',
        deployment_region,
        '--verbose',
    ]

    try:
        deployment_endpoint = run_aws_ec2_create_command(create_deployment_command)

        instance_addresses = wait_for_healthy_targets_from_stack(
            name=deployment_name,
            namespace=deployment_namespace,
            region=deployment_region,
        )
        assert deployment_endpoint, "AWS EC2 deployment creation should succeed"
        assert instance_addresses, "AWS EC2 deployment should have all targets healthy"

        iris = datasets.load_iris()
        sample_data = iris.data[0:1]
        results = send_test_data_to_multiple_endpoint(
            [deployment_endpoint] + instance_addresses, json.dumps(sample_data.tolist())
        )
        for result in results:
            assert result[0] == 200, "prediction request should succeed"
            assert result[1] == '[0]', "prediction result mismatch"
    finally:
        delete_deployment('ec2', deployment_name, deployment_namespace)
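The EC2 example additionally relies on `get_default_aws_region` and `send_test_data_to_multiple_endpoint`. The sketches below are plausible reconstructions only: the boto3 region lookup with a hard-coded fallback and the simple fan-out over endpoints are assumptions, not the real helpers.

import boto3
import requests


def get_default_aws_region():
    # Sketch only: take the region from the standard AWS config/environment,
    # falling back to an assumed default when none is configured.
    return boto3.session.Session().region_name or 'us-west-2'


def send_test_data_to_multiple_endpoint(endpoints, sample_data):
    # Sketch only: POST the same JSON payload to every endpoint and return a
    # list of (status_code, response_text) tuples, matching how the EC2
    # example iterates over `results`.
    results = []
    for endpoint in endpoints:
        response = requests.post(
            endpoint,
            data=sample_data,
            headers={'Content-Type': 'application/json'},
        )
        results.append((response.status_code, response.text))
    return results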
Example #6
def test_sagemaker_deployment(iris_clf_service):
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'tests-sagemaker-e2e-{random_hash}'
    region = 'us-west-2'
    create_deployment_command = [
        'bentoml',
        'sagemaker',
        'deploy',
        deployment_name,
        '-b',
        iris_clf_service,
        '--region',
        region,
        '--api-name',
        'predict',
        '--num-of-gunicorn-workers-per-instance',
        '2',
        '--wait',
        '--verbose',
    ]

    try:
        deployment_success, endpoint_name = run_sagemaker_create_or_update_command(
            create_deployment_command)
        assert deployment_success, 'Sagemaker deployment was unsuccessful'
        assert endpoint_name, 'Sagemaker deployment endpoint name is missing'

        iris = datasets.load_iris()
        sample_data = iris.data[0:1]
        request_success, prediction_result = send_test_data_to_endpoint(
            endpoint_name, f'"{json.dumps(sample_data.tolist())}"', region)
        assert request_success, 'Failed to make successful Sagemaker prediction'
        assert ('[\n  0\n]\n' == prediction_result
                ), 'Sagemaker prediction result mismatches with expected value'
    finally:
        delete_deployment('sagemaker', deployment_name)
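As a closing note: the SageMaker helpers above recover the endpoint name from CLI output, but one could also look it up and wait for it to become InService directly with boto3. The helper below is hypothetical and not part of the original test suite.

import time

import boto3


def wait_for_sagemaker_endpoint(deployment_name, region='us-west-2', timeout=600):
    # Hypothetical helper: find a SageMaker endpoint whose name contains the
    # deployment name and poll until it reports InService or the timeout hits.
    client = boto3.client('sagemaker', region_name=region)
    deadline = time.time() + timeout
    while time.time() < deadline:
        endpoints = client.list_endpoints(NameContains=deployment_name)['Endpoints']
        if endpoints:
            name = endpoints[0]['EndpointName']
            status = client.describe_endpoint(EndpointName=name)['EndpointStatus']
            if status == 'InService':
                return name
        time.sleep(15)
    return None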