def test_stop_tuning_job(sagemaker_session, tuner):
    sagemaker_session.stop_tuning_job = Mock(name='stop_hyper_parameter_tuning_job')
    tuner.latest_tuning_job = _TuningJob(sagemaker_session, JOB_NAME)

    tuner.stop_tuning_job()

    sagemaker_session.stop_tuning_job.assert_called_once_with(name=JOB_NAME)

def test_wait(tuner):
    tuner.latest_tuning_job = _TuningJob(tuner.estimator.sagemaker_session, JOB_NAME)
    tuner.estimator.sagemaker_session.wait_for_tuning_job = Mock(name='wait_for_tuning_job')

    tuner.wait()

    tuner.estimator.sagemaker_session.wait_for_tuning_job.assert_called_once_with(JOB_NAME)

def test_tuning_job_wait(sagemaker_session):
    sagemaker_session.wait_for_tuning_job = Mock(name='wait_for_tuning_job')

    tuning_job = _TuningJob(sagemaker_session, JOB_NAME)
    tuning_job.wait()

    sagemaker_session.wait_for_tuning_job.assert_called_once_with(JOB_NAME)

def test_deploy_default(tuner):
    returned_training_job_description = {
        "AlgorithmSpecification": {
            "TrainingInputMode": "File",
            "TrainingImage": IMAGE_NAME,
            "MetricDefinitions": METRIC_DEFINITIONS,
        },
        "HyperParameters": {
            "sagemaker_submit_directory": '"s3://some/sourcedir.tar.gz"',
            "checkpoint_path": '"s3://other/1508872349"',
            "sagemaker_program": '"iris-dnn-classifier.py"',
            "sagemaker_enable_cloudwatch_metrics": "false",
            "sagemaker_container_log_level": '"logging.INFO"',
            "sagemaker_job_name": '"neo"',
            "training_steps": "100",
            "_tuning_objective_metric": "Validation-accuracy",
        },
        "RoleArn": ROLE,
        "ResourceConfig": {
            "VolumeSizeInGB": 30,
            "InstanceCount": 1,
            "InstanceType": "ml.c4.xlarge",
        },
        "StoppingCondition": {"MaxRuntimeInSeconds": 24 * 60 * 60},
        "TrainingJobName": "neo",
        "TrainingJobStatus": "Completed",
        "TrainingJobArn": "arn:aws:sagemaker:us-west-2:336:training-job/neo",
        "OutputDataConfig": {"KmsKeyId": "", "S3OutputPath": "s3://place/output/neo"},
        "TrainingJobOutput": {"S3TrainingJobOutput": "s3://here/output.tar.gz"},
        "ModelArtifacts": {"S3ModelArtifacts": MODEL_DATA},
    }
    tuning_job_description = {"BestTrainingJob": {"TrainingJobName": JOB_NAME}}
    returned_list_tags = {"Tags": [{"Key": "TagtestKey", "Value": "TagtestValue"}]}

    tuner.estimator.sagemaker_session.sagemaker_client.describe_training_job = Mock(
        name="describe_training_job", return_value=returned_training_job_description
    )
    tuner.estimator.sagemaker_session.sagemaker_client.list_tags = Mock(
        name="list_tags", return_value=returned_list_tags
    )
    tuner.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job = Mock(
        name="describe_hyper_parameter_tuning_job", return_value=tuning_job_description
    )
    tuner.estimator.sagemaker_session.log_for_jobs = Mock(name="log_for_jobs")

    tuner.latest_tuning_job = _TuningJob(tuner.estimator.sagemaker_session, JOB_NAME)

    predictor = tuner.deploy(TRAIN_INSTANCE_COUNT, TRAIN_INSTANCE_TYPE)

    tuner.estimator.sagemaker_session.create_model.assert_called_once()
    args = tuner.estimator.sagemaker_session.create_model.call_args[0]
    assert args[0] == "neo"
    assert args[1] == ROLE
    assert args[2]["Image"] == IMAGE_NAME
    assert args[2]["ModelDataUrl"] == MODEL_DATA

    assert isinstance(predictor, RealTimePredictor)
    assert predictor.endpoint.startswith(JOB_NAME)
    assert predictor.sagemaker_session == tuner.estimator.sagemaker_session

def test_start_new(tuner, sagemaker_session):
    tuning_job = _TuningJob(sagemaker_session, JOB_NAME)
    tuner.static_hyperparameters = {}

    started_tuning_job = tuning_job.start_new(tuner, INPUTS)

    assert started_tuning_job.sagemaker_session == sagemaker_session
    sagemaker_session.tune.assert_called_once()

def test_delete_endpoint(tuner):
    tuner.latest_tuning_job = _TuningJob(tuner.estimator.sagemaker_session, JOB_NAME)

    tuning_job_description = {'BestTrainingJob': {'TrainingJobName': JOB_NAME}}
    tuner.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job = Mock(
        name='describe_hyper_parameter_tuning_job', return_value=tuning_job_description)

    tuner.delete_endpoint()

    tuner.sagemaker_session.delete_endpoint.assert_called_with(JOB_NAME)

def test_best_tuning_job(tuner):
    tuning_job_description = {'BestTrainingJob': {'TrainingJobName': JOB_NAME}}

    tuner.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job = Mock(
        name='describe_hyper_parameter_tuning_job', return_value=tuning_job_description)

    tuner.latest_tuning_job = _TuningJob(tuner.estimator.sagemaker_session, JOB_NAME)

    best_training_job = tuner.best_training_job()

    assert best_training_job == JOB_NAME
    tuner.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job.assert_called_once_with(
        HyperParameterTuningJobName=JOB_NAME)

def test_best_tuning_job_no_best_job(tuner):
    tuning_job_description = {'BestTrainingJob': {'Mock': None}}

    tuner.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job = Mock(
        name='describe_hyper_parameter_tuning_job', return_value=tuning_job_description)

    tuner.latest_tuning_job = _TuningJob(tuner.estimator.sagemaker_session, JOB_NAME)

    with pytest.raises(Exception) as e:
        tuner.best_training_job()

    tuner.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job.assert_called_once_with(
        HyperParameterTuningJobName=JOB_NAME)
    assert 'Best training job not available for tuning job:' in str(e)

def test_analytics(tuner):
    tuner.latest_tuning_job = _TuningJob(tuner.sagemaker_session, 'testjob')

    tuner_analytics = tuner.analytics()

    assert tuner_analytics is not None
    assert tuner_analytics.name.find('testjob') > -1

def test_stop(sagemaker_session):
    tuning_job = _TuningJob(sagemaker_session, JOB_NAME)
    tuning_job.stop()

    sagemaker_session.stop_tuning_job.assert_called_once_with(name=JOB_NAME)
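

# --- Illustrative fixture sketch (assumption, not this file's actual code) ---
# The tests above depend on module-level constants (JOB_NAME, IMAGE_NAME, ROLE,
# MODEL_DATA, METRIC_DEFINITIONS, INPUTS, TRAIN_INSTANCE_COUNT,
# TRAIN_INSTANCE_TYPE) and on the `sagemaker_session` and `tuner` pytest
# fixtures defined elsewhere in this module. The commented-out sketch below
# shows one minimal way such fixtures could be wired up; the names Estimator,
# HyperparameterTuner, OBJECTIVE_METRIC_NAME, and HYPERPARAMETER_RANGES are
# assumptions about the surrounding module, not verified definitions.
#
# @pytest.fixture()
# def sagemaker_session():
#     # Every session method the tests exercise (tune, stop_tuning_job,
#     # wait_for_tuning_job, create_model, ...) is auto-created on access.
#     return Mock(name='sagemaker_session')
#
#
# @pytest.fixture()
# def tuner(sagemaker_session):
#     estimator = Estimator(IMAGE_NAME, ROLE, TRAIN_INSTANCE_COUNT, TRAIN_INSTANCE_TYPE,
#                           output_path='s3://bucket/prefix',
#                           sagemaker_session=sagemaker_session)
#     return HyperparameterTuner(estimator, OBJECTIVE_METRIC_NAME,
#                                HYPERPARAMETER_RANGES, METRIC_DEFINITIONS)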