Example #1
0
def test_deploy_model(
    sklearn_training_job,
    sagemaker_session,
    cpu_instance_type,
    sklearn_latest_version,
    sklearn_latest_py_version,
):
    """Build an SKLearnModel from a completed training job, deploy it, and verify predictions."""
    endpoint_name = f"test-sklearn-deploy-model-{sagemaker_timestamp()}"
    with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
        # Look up the artifacts produced by the earlier training job.
        job_description = sagemaker_session.sagemaker_client.describe_training_job(
            TrainingJobName=sklearn_training_job
        )
        artifacts_s3_uri = job_description["ModelArtifacts"]["S3ModelArtifacts"]
        entry_script = os.path.join(DATA_DIR, "sklearn_mnist", "mnist.py")
        model = SKLearnModel(
            artifacts_s3_uri,
            "SageMakerRole",
            entry_point=entry_script,
            framework_version=sklearn_latest_version,
            sagemaker_session=sagemaker_session,
        )
        predictor = model.deploy(1, cpu_instance_type, endpoint_name=endpoint_name)
        _predict_and_assert(predictor)
def test_model(sagemaker_session):
    """Deploying an SKLearnModel yields an SKLearnPredictor instance."""
    model = SKLearnModel(
        "s3://some/data.tar.gz",
        role=ROLE,
        entry_point=SCRIPT_PATH,
        sagemaker_session=sagemaker_session,
    )
    deployed = model.deploy(1, CPU)
    assert isinstance(deployed, SKLearnPredictor)
Example #3
0
def main():
    """Package a dummy model artifact and deploy it to a SageMaker local-mode endpoint."""
    sagemaker_session = LocalSession()
    sagemaker_session.config = {'local': {'local_code': True}}

    # Create an empty placeholder artifact and package it as model.tar.gz.
    dummy_model_file = Path("dummy.model")
    dummy_model_file.touch()

    with tarfile.open("model.tar.gz", "w:gz") as tar:
        tar.add(dummy_model_file.as_posix())

    # For local training a dummy role will be sufficient
    role = DUMMY_IAM_ROLE

    # Pass the configured LocalSession explicitly: it was previously created
    # but never used, so the local_code setting silently had no effect.
    model = SKLearnModel(role=role,
                         model_data='file://./model.tar.gz',
                         framework_version='0.23-1',
                         py_version='py3',
                         source_dir='code',
                         entry_point='inference.py',
                         sagemaker_session=sagemaker_session)

    print('Deploying endpoint in local mode')
    print(
        'Note: if launching for the first time in local mode, container image download might take a few minutes to complete.'
    )
    predictor = model.deploy(
        initial_instance_count=1,
        instance_type='local',
    )

    do_inference_on_local_endpoint(predictor)

    print('About to delete the endpoint to stop paying (if in cloud mode).')
    # Predictor.delete_endpoint's only parameter is delete_endpoint_config
    # (bool); passing the endpoint name was misleading (merely truthy).
    predictor.delete_endpoint()
Example #4
0
def test_deploy_model(sklearn_training_job, sagemaker_session):
    """Deploy a model built from an existing training job and check its predictions."""
    endpoint_name = 'test-sklearn-deploy-model-{}'.format(sagemaker_timestamp())
    with timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
        # Fetch the S3 location of the trained model artifacts.
        job_desc = sagemaker_session.sagemaker_client.describe_training_job(
            TrainingJobName=sklearn_training_job
        )
        artifacts = job_desc['ModelArtifacts']['S3ModelArtifacts']
        entry_script = os.path.join(DATA_DIR, 'sklearn_mnist', 'mnist.py')
        model = SKLearnModel(
            artifacts,
            'SageMakerRole',
            entry_point=entry_script,
            sagemaker_session=sagemaker_session,
        )
        predictor = model.deploy(1, "ml.m4.xlarge", endpoint_name=endpoint_name)
        _predict_and_assert(predictor)
def test_model_custom_serialization(sagemaker_session, sklearn_version):
    """Custom serializer/deserializer passed to deploy() must land on the predictor."""
    model = SKLearnModel(
        "s3://some/data.tar.gz",
        role=ROLE,
        entry_point=SCRIPT_PATH,
        framework_version=sklearn_version,
        sagemaker_session=sagemaker_session,
    )
    serializer, deserializer = Mock(), Mock()
    predictor = model.deploy(
        1,
        CPU,
        serializer=serializer,
        deserializer=deserializer,
    )
    assert isinstance(predictor, SKLearnPredictor)
    assert predictor.serializer is serializer
    assert predictor.deserializer is deserializer
Example #6
0
def main():
    """Download a pre-trained scikit-learn model, deploy it locally, and run inference."""

    # Prepare data for model inference - we use the California housing dataset
    # (the previous comment said "Boston", but fetch_california_housing is called).
    print('Preparing data for model inference')
    data = fetch_california_housing()
    X_train, X_test, y_train, y_test = train_test_split(data.data,
                                                        data.target,
                                                        test_size=0.25,
                                                        random_state=42)

    # we don't train a model, so we will need only the testing data
    testX = pd.DataFrame(X_test, columns=data.feature_names)

    # Download a pre-trained model file
    print('Downloading a pre-trained model file')
    s3.download_file(
        'aws-ml-blog',
        'artifacts/scikit_learn_bring_your_own_model/model.joblib',
        'model.joblib')

    # Package the artifact; the context manager guarantees the archive is
    # flushed and closed even if tar.add raises.
    with tarfile.open('model.tar.gz', 'w:gz') as tar:
        tar.add('model.joblib')

    model = SKLearnModel(role=DUMMY_IAM_ROLE,
                         model_data='file://./model.tar.gz',
                         framework_version='0.23-1',
                         py_version='py3',
                         source_dir='code',
                         entry_point='inference.py')

    print('Deploying endpoint in local mode')
    predictor = model.deploy(initial_instance_count=1, instance_type='local')

    predictions = predictor.predict(testX[data.feature_names].head(5))
    print(f"Predictions: {predictions}")

    print('About to delete the endpoint to stop paying (if in cloud mode).')
    # Predictor.delete_endpoint's only parameter is delete_endpoint_config
    # (bool); passing the endpoint name was misleading (merely truthy).
    predictor.delete_endpoint()
    entry_point='entry_point.py',
    source_dir=str(Path(current_folder, '../containers/model/src').resolve()),
    dependencies=[str(Path(current_folder, '../package/package').resolve())],
    image=config.ECR_IMAGE,
    code_location='s3://' +
    str(Path(config.S3_BUCKET, config.OUTPUTS_S3_PREFIX)))

# %% [markdown]
# Calling `deploy` will start a container to host the model.
# You can expect this step to take approximately 5 minutes.

# %%
# Host the model on a real-time endpoint under the chosen name, tagging the
# created resources with the solution's key/prefix from config.
model.deploy(endpoint_name=explainer_name,
             instance_type='ml.c5.xlarge',
             initial_instance_count=1,
             tags=[{
                 'Key': config.TAG_KEY,
                 'Value': config.SOLUTION_PREFIX
             }])

# %% [markdown]
# When you're trying to update the model for development purposes, but
# experiencing issues because the model/endpoint-config/endpoint already
# exists, you can delete the existing model/endpoint-config/endpoint by
# uncommenting and running the following commands:

# %%
# sagemaker_client.delete_endpoint(EndpointName=explainer_name)
# sagemaker_client.delete_endpoint_config(EndpointConfigName=explainer_name)
# sagemaker_client.delete_model(ModelName=explainer_name)