Example #1

import tarfile
from pathlib import Path

from sagemaker import LocalSession
from sagemaker.sklearn import SKLearnModel

# A syntactically valid but non-existent role ARN; sufficient for local mode.
DUMMY_IAM_ROLE = 'arn:aws:iam::111111111111:role/service-role/AmazonSageMaker-ExecutionRole-20200101T000001'


def main():
    sagemaker_session = LocalSession()
    sagemaker_session.config = {'local': {'local_code': True}}

    dummy_model_file = Path("dummy.model")
    dummy_model_file.touch()

    with tarfile.open("model.tar.gz", "w:gz") as tar:
        tar.add(dummy_model_file.as_posix())

    # For local mode a dummy role will be sufficient
    role = DUMMY_IAM_ROLE

    model = SKLearnModel(role=role,
                         model_data='file://./model.tar.gz',
                         framework_version='0.23-1',
                         py_version='py3',
                         source_dir='code',
                         entry_point='inference.py')

    print('Deploying endpoint in local mode')
    print(
        'Note: if launching for the first time in local mode, container image download might take a few minutes to complete.'
    )
    predictor = model.deploy(
        initial_instance_count=1,
        instance_type='local',
    )

    do_inference_on_local_endpoint(predictor)

    print('About to delete the endpoint to stop paying (if in cloud mode).')
    predictor.delete_endpoint()
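
The entry point code/inference.py is referenced above but not included in this excerpt. Below is a minimal, hypothetical sketch of what it might contain, assuming the stock SKLearn serving container (which only requires model_fn; input_fn, predict_fn and output_fn fall back to defaults). The function bodies are illustrative placeholders, not the author's code.

# --- code/inference.py (hypothetical sketch) ---
def model_fn(model_dir):
    # The tarball created above holds only an empty dummy.model file, so
    # there is nothing real to deserialize; return a placeholder object.
    return 'dummy-model'


def predict_fn(input_data, model):
    # Echo the input back; a real entry point would call model.predict().
    return input_data

Likewise, do_inference_on_local_endpoint is called in main() but not defined in the excerpt. A sketch under the same assumptions (the payload shape is arbitrary and must match whatever inference.py expects), plus the usual entry-point guard so the script runs end to end:

import numpy as np


def do_inference_on_local_endpoint(predictor):
    # Send a small illustrative payload to the local endpoint and print the
    # result; the SKLearn predictor serializes NumPy arrays by default.
    payload = np.array([[1.0, 2.0, 3.0, 4.0]])
    print(f'Prediction: {predictor.predict(payload)}')


if __name__ == '__main__':
    main()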

Example #2
# Prerequisite: build the local container image before running this example:
#     docker build -t sagemaker-sklearn-rf-regressor-local container/.
########################################################################################################################
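
The training and deployment code of this example continues beyond this excerpt. For orientation, here is a hypothetical sketch of how a custom image like the one built above is typically run in local mode with the generic Estimator; the training channel path and parameters are assumptions, not the author's code.

# --- hypothetical sketch, not part of the example file ---
from sagemaker.estimator import Estimator

estimator = Estimator(
    image_uri='sagemaker-sklearn-rf-regressor-local',  # the image built above
    role=DUMMY_IAM_ROLE,                               # dummy role, defined below
    instance_count=1,
    instance_type='local',                             # run in Docker, not on cloud instances
)
estimator.fit({'train': 'file://./data/train'})        # assumed local training channel
predictor = estimator.deploy(initial_instance_count=1, instance_type='local')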
import tarfile

import boto3
import pandas as pd
from sagemaker import Model, LocalSession
from sagemaker.deserializers import CSVDeserializer
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sagemaker.predictor import Predictor
from sagemaker.serializers import CSVSerializer

# Configure the session to use local code (the source directory on disk)
# instead of packaging and uploading it to S3.
sagemaker_session = LocalSession()
sagemaker_session.config = {'local': {'local_code': True}}

DUMMY_IAM_ROLE = 'arn:aws:iam::111111111111:role/service-role/AmazonSageMaker-ExecutionRole-20200101T000001'
s3 = boto3.client('s3')


def main():

    image_name = "sagemaker-sklearn-rf-regressor-local"

    # Prepare data for model inference - we use the Boston housing dataset
    print('Preparing data for model inference')
    data = fetch_california_housing()
    X_train, X_test, y_train, y_test = train_test_split(data.data,
                                                        data.target,
                                                        test_size=0.25,