@patch('boto3.client')  # assumed decorator: the body treats `client` as the patched boto3 client factory
def test_configure(client):
    s3 = MagicMock()
    client.return_value = s3
    loc = {'LocationConstraint': BUCKET_REGION}
    s3.get_bucket_location.return_value = loc

    s3_utils.configure(MODEL_DIR, JOB_REGION)

    assert os.environ['S3_REGION'] == BUCKET_REGION
    assert os.environ['TF_CPP_MIN_LOG_LEVEL'] == '1'
    assert os.environ['S3_USE_HTTPS'] == '1'
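

# For context, a minimal sketch of what s3_utils.configure is assumed to do, inferred only from
# the assertions in the tests here: when model_dir points at S3, the bucket's own region is looked
# up via get_bucket_location and exported as S3_REGION; otherwise the job region is used. The name
# _configure_sketch and the bucket-parsing details are illustrative assumptions, not the toolkit's
# actual implementation.
def _configure_sketch(model_dir, job_region):
    import os

    import boto3

    model_dir = model_dir or ''
    if model_dir.startswith('s3://'):
        # Parse the bucket name out of the s3:// URI and ask S3 where the bucket lives.
        bucket = model_dir[len('s3://'):].split('/')[0]
        location = boto3.client('s3', region_name=job_region).get_bucket_location(Bucket=bucket)
        os.environ['S3_REGION'] = location.get('LocationConstraint') or job_region
    else:
        os.environ['S3_REGION'] = job_region
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
    os.environ['S3_USE_HTTPS'] = '1'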
def main():
    """Training entry point
    """
    hyperparameters = environment.read_hyperparameters()
    env = environment.Environment(hyperparameters=hyperparameters)

    user_hyperparameters = env.hyperparameters

    # When the training job belongs to a hyperparameter tuning job, append the training job name
    # to model_dir so the individual jobs do not read from/write to the same S3 object.
    if "_tuning_objective_metric" in hyperparameters:
        model_dir = _model_dir_with_training_job(hyperparameters.get("model_dir"), env.job_name)
        logger.info("Appending the training job name to model_dir: {}".format(model_dir))
        user_hyperparameters["model_dir"] = model_dir

    s3_utils.configure(user_hyperparameters.get("model_dir"), os.environ.get("SAGEMAKER_REGION"))
    train(env, mapping.to_cmd_args(user_hyperparameters))
    _log_model_missing_warning(MODEL_DIR)
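

# A minimal sketch of what _model_dir_with_training_job is assumed to do, based only on the
# comment in main() above: join the training job name onto an S3 model_dir so each tuning job
# gets its own prefix. The name of this sketch and its handling of local or empty paths are
# assumptions; the toolkit's actual helper may differ.
def _model_dir_with_training_job_sketch(model_dir, job_name):
    if model_dir and model_dir.startswith("s3://"):
        # e.g. "s3://bucket/model" + "job-123" -> "s3://bucket/model/job-123"
        return "{}/{}".format(model_dir.rstrip("/"), job_name)
    # Local (or unset) model_dir is left untouched.
    return model_dir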
def test_configure_local_dir():
    # A local model_dir skips the bucket lookup, so S3_REGION falls back to the job region.
    s3_utils.configure('/opt/ml/model', JOB_REGION)

    assert os.environ['S3_REGION'] == JOB_REGION
    assert os.environ['TF_CPP_MIN_LOG_LEVEL'] == '1'
    assert os.environ['S3_USE_HTTPS'] == '1'