# Imports assumed by these test snippets. ``fw_utils`` ships with the
# SageMaker Python SDK; the remaining helpers (copy_resource,
# create_config_files, create_model, file_exists, train, HostingContainer)
# and the pytest fixtures (docker_image, sagemaker_session, opt_ml,
# processor, region) are local to the container's integration-test suite,
# so the module paths below are a best guess.
import os
import uuid

from sagemaker import fw_utils

from test.integ.docker_utils import HostingContainer, train
from test.integ.utils import (copy_resource, create_config_files,
                              create_model, file_exists)

SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))


def test_save_restore_assets(docker_image, sagemaker_session, opt_ml,
                             processor):
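    """Train the sentiment example, checkpointing to a unique S3 path, then
    serve the trained model and run the sentiment-classification checks."""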
    resource_path = os.path.join(SCRIPT_PATH, '../resources/sentiment')

    default_bucket = sagemaker_session.default_bucket()

    copy_resource(resource_path, opt_ml, 'data', 'input/data')

    s3_source_archive = fw_utils.tar_and_upload_dir(
        session=sagemaker_session.boto_session,
        bucket=sagemaker_session.default_bucket(),
        s3_key_prefix='test_job',
        script='sentiment.py',
        directory=os.path.join(resource_path, 'code'))

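    # Use a unique S3 output path per run so checkpoints never collide.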
    checkpoint_s3_path = 's3://{}/save_restore_assets/output-{}'.format(
        default_bucket, uuid.uuid4())

    additional_hyperparameters = dict(training_steps=1000,
                                      evaluation_steps=100,
                                      checkpoint_path=checkpoint_s3_path)
    create_config_files('sentiment.py', s3_source_archive.s3_prefix, opt_ml,
                        additional_hyperparameters)
    os.makedirs(os.path.join(opt_ml, 'model'))

    train(docker_image, opt_ml, processor)

    with HostingContainer(opt_ml=opt_ml,
                          image=docker_image,
                          script_name='sentiment.py',
                          processor=processor) as c:
        c.execute_pytest(
            'test/integ/container_tests/sentiment_classification.py')


def test_train_input_fn_with_input_channels(docker_image, sagemaker_session, opt_ml, processor):
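    """Train the iris example whose train_input_fn reads from named input
    channels, check the training artifacts, then serve the exported model."""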
    resource_path = os.path.join(SCRIPT_PATH, '../resources/iris')

    copy_resource(resource_path, opt_ml, 'code')
    copy_resource(resource_path, opt_ml, 'data', 'input/data')

    s3_source_archive = fw_utils.tar_and_upload_dir(session=sagemaker_session.boto_session,
                                                    bucket=sagemaker_session.default_bucket(),
                                                    s3_key_prefix='test_job',
                                                    script='iris_train_input_fn_with_channels.py',
                                                    directory=os.path.join(resource_path, 'code'))

    additional_hyperparameters = dict(training_steps=1, evaluation_steps=1)
    create_config_files('iris_train_input_fn_with_channels.py', s3_source_archive.s3_prefix,
                        opt_ml, additional_hyperparameters)
    os.makedirs(os.path.join(opt_ml, 'model'))

    train(docker_image, opt_ml, processor)

    assert file_exists(opt_ml, 'model/export/Servo'), 'model was not exported'
    assert file_exists(opt_ml, 'model/checkpoint'), 'checkpoint was not created'
    assert file_exists(opt_ml, 'output/success'), 'Success file was not created'
    assert not file_exists(opt_ml, 'output/failure'), 'Failure happened'

    with HostingContainer(image=docker_image, opt_ml=opt_ml,
                          script_name='iris_train_input_fn_with_channels.py', processor=processor) as c:
        c.execute_pytest('test/integ/container_tests/estimator_classification.py')


def test_layers_prediction(docker_image, sagemaker_session, opt_ml):
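    """Train the MNIST layers example and verify the exported model serves
    predictions from the hosting container."""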
    resource_path = os.path.join(SCRIPT_PATH, '../resources/mnist')

    copy_resource(resource_path, opt_ml, 'code')
    copy_resource(resource_path, opt_ml, 'data', 'input/data')

    s3_source_archive = fw_utils.tar_and_upload_dir(session=sagemaker_session.boto_session,
                                                    bucket=sagemaker_session.default_bucket(),
                                                    s3_key_prefix='test_job',
                                                    script='mnist.py',
                                                    directory=os.path.join(resource_path, 'code'))

    create_config_files('mnist.py', s3_source_archive.s3_prefix, opt_ml,
                        dict(training_steps=1, evaluation_steps=1))
    os.makedirs(os.path.join(opt_ml, 'model'))

    train(docker_image, opt_ml)

    assert file_exists(opt_ml, 'model/export/Servo'), 'model was not exported'
    assert file_exists(opt_ml, 'model/checkpoint'), 'checkpoint was not created'
    assert file_exists(opt_ml, 'output/success'), 'Success file was not created'
    assert not file_exists(opt_ml, 'output/failure'), 'Failure happened'

    with HostingContainer(image=docker_image, opt_ml=opt_ml,
                          script_name='mnist.py') as c:
        c.execute_pytest('test/integ/container_tests/layers_prediction.py')


def test_large_grpc_message(docker_image, opt_ml, processor, region):
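    """Serve a pre-built model (no training step) and run the
    large-gRPC-message prediction checks against it."""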
    resource_path = os.path.join(SCRIPT_PATH,
                                 '../resources/large_grpc_message')
    copy_resource(resource_path, opt_ml, 'code', 'code')
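    # No training in this test; create_model writes a ready-to-serve model
    # directly under the versioned Servo export path.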
    export_dir = os.path.join(opt_ml, 'model', 'export', 'Servo', '1')
    create_model(export_dir)

    with HostingContainer(opt_ml=opt_ml,
                          image=docker_image,
                          script_name='inference.py',
                          processor=processor,
                          region=region) as c:
        c.execute_pytest('test/integ/container_tests/large_grpc_message.py')


def test_estimator_classification(docker_image, sagemaker_session, opt_ml):
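    """Train the iris estimator example with a requirements.txt, then verify
    both the predictions and that the pinned dependency was installed."""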
    resource_path = os.path.join(SCRIPT_PATH, '../resources/iris')

    copy_resource(resource_path, opt_ml, 'code')
    copy_resource(resource_path, opt_ml, 'data', 'input/data')

    s3_source_archive = fw_utils.tar_and_upload_dir(
        session=sagemaker_session.boto_session,
        bucket=sagemaker_session.default_bucket(),
        s3_key_prefix='test_job',
        script='iris.py',
        directory=os.path.join(resource_path, 'code'))

    additional_hyperparameters = dict(
        training_steps=1,
        evaluation_steps=1,
        sagemaker_requirements='requirements.txt')
    create_config_files('iris.py', s3_source_archive.s3_prefix, opt_ml,
                        additional_hyperparameters)
    os.makedirs(os.path.join(opt_ml, 'model'))

    train(docker_image, opt_ml)

    assert file_exists(opt_ml, 'model/export/Servo'), 'model was not exported'
    assert file_exists(opt_ml, 'model/checkpoint'), 'checkpoint was not created'
    assert file_exists(opt_ml, 'output/success'), 'Success file was not created'
    assert not file_exists(opt_ml, 'output/failure'), 'Failure happened'

    with HostingContainer(opt_ml=opt_ml,
                          image=docker_image,
                          script_name='iris.py',
                          requirements_file='requirements.txt') as c:
        c.execute_pytest(
            'test/integ/container_tests/estimator_classification.py')

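        # requirements.txt in the iris resources is expected to pin
        # beautifulsoup4==4.6.0; confirm it was installed into the container.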
        modules = c.execute_command(['pip', 'freeze'])
        assert 'beautifulsoup4==4.6.0' in modules


def test_no_serving_input_fn(docker_image, sagemaker_session, opt_ml, processor):
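    """Train a script that defines no serving_input_fn: training must succeed,
    but no servable model should be exported."""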
    resource_path = os.path.join(SCRIPT_PATH, '../resources/iris')

    copy_resource(resource_path, opt_ml, 'code')
    copy_resource(resource_path, opt_ml, 'data', 'input/data')

    s3_source_archive = fw_utils.tar_and_upload_dir(session=sagemaker_session.boto_session,
                                                    bucket=sagemaker_session.default_bucket(),
                                                    s3_key_prefix='test_job',
                                                    script='iris_no_serving_input.py',
                                                    directory=os.path.join(resource_path, 'code'))

    additional_hyperparameters = dict(training_steps=1, evaluation_steps=1)
    create_config_files('iris_no_serving_input.py', s3_source_archive.s3_prefix,
                        opt_ml, additional_hyperparameters)

    os.makedirs(os.path.join(opt_ml, 'model'))

    train(docker_image, opt_ml, processor)

    # The script defines no serving_input_fn, so no model should be exported.
    assert not file_exists(opt_ml, 'model/export/Servo'), 'model should not have been exported'
    assert file_exists(opt_ml, 'model/checkpoint'), 'checkpoint was not created'
    assert file_exists(opt_ml, 'output/success'), 'Success file was not created'
    assert not file_exists(opt_ml, 'output/failure'), 'Failure happened'