import datetime
import os
import uuid

from sagemaker import HyperparameterTuningJobAnalytics


def test_description():
    session = create_sagemaker_session(
        describe_tuning_result={
            "HyperParameterTuningJobConfig": {
                "ParameterRanges": {
                    "CategoricalParameterRanges": [],
                    "ContinuousParameterRanges": [
                        {"MaxValue": "1", "MinValue": "0", "Name": "eta"},
                        {"MaxValue": "10", "MinValue": "0", "Name": "gamma"},
                    ],
                    "IntegerParameterRanges": [
                        {"MaxValue": "30", "MinValue": "5", "Name": "num_layers"},
                        {"MaxValue": "100", "MinValue": "50", "Name": "iterations"},
                    ],
                },
            },
        }
    )
    tuner = HyperparameterTuningJobAnalytics("my-tuning-job", sagemaker_session=session)

    # The first call to description() hits the service exactly once.
    d = tuner.description()
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 1
    assert d is not None
    assert d["HyperParameterTuningJobConfig"] is not None

    # Clearing the cache forces another service call; a repeated call is served from cache.
    tuner.clear_cache()
    d = tuner.description()
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 2
    d = tuner.description()
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 2

    # force_refresh=True bypasses the cache and calls the service again.
    d = tuner.description(force_refresh=True)
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 3

    # Check that the ranges work.
    r = tuner.tuning_ranges
    assert len(r) == 4
def test_tuner_dataframe():
    def mock_summary(name="job-name", value=0.9):
        # Build one TrainingJobSummary shaped like the SageMaker API response.
        return {
            "TrainingJobName": name,
            "TrainingJobStatus": "Completed",
            "FinalHyperParameterTuningJobObjectiveMetric": {
                "Name": "awesomeness",
                "Value": value,
            },
            "TrainingStartTime": datetime.datetime(2018, 5, 16, 1, 2, 3),
            "TrainingEndTime": datetime.datetime(2018, 5, 16, 5, 6, 7),
            "TunedHyperParameters": {
                "learning_rate": 0.1,
                "layers": 137,
            },
        }

    session = create_sagemaker_session(
        list_training_results={
            "TrainingJobSummaries": [
                mock_summary(),
                mock_summary(),
                mock_summary(),
                mock_summary(),
                mock_summary(),
            ]
        }
    )
    tuner = HyperparameterTuningJobAnalytics("my-tuning-job", sagemaker_session=session)
    df = tuner.dataframe()
    assert df is not None
    assert len(df) == 5
    assert len(session.sagemaker_client.list_training_jobs_for_hyper_parameter_tuning_job.mock_calls) == 1

    # Clear the cache, check that it calls the service again.
    tuner.clear_cache()
    df = tuner.dataframe()
    assert len(session.sagemaker_client.list_training_jobs_for_hyper_parameter_tuning_job.mock_calls) == 2
    df = tuner.dataframe(force_refresh=True)
    assert len(session.sagemaker_client.list_training_jobs_for_hyper_parameter_tuning_job.mock_calls) == 3

    # Check that the hyperparameter is in the dataframe.
    assert len(df["layers"]) == 5
    assert min(df["layers"]) == 137

    # Check that the training time calculation is returning something sane.
    assert min(df["TrainingElapsedTimeSeconds"]) > 5
    assert max(df["TrainingElapsedTimeSeconds"]) < 86400

    # Export to CSV and check that the file exists.
    tmp_name = "/tmp/unit-test-%s.csv" % uuid.uuid4()
    assert not os.path.isfile(tmp_name)
    tuner.export_csv(tmp_name)
    assert os.path.isfile(tmp_name)
    os.unlink(tmp_name)
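# For reference, a minimal sketch of the create_sagemaker_session helper that both
# tests above rely on. The real helper is defined elsewhere in this test module; the
# signature and Mock wiring below are assumptions for illustration only. The idea is
# simply a Mock session whose sagemaker_client answers the two analytics calls with
# canned results, so the tests can count invocations via mock_calls.
#
# from unittest.mock import Mock
#
#
# def create_sagemaker_session(describe_tuning_result=None, list_training_results=None):
#     client = Mock(name="sagemaker_client")
#     client.describe_hyper_parameter_tuning_job = Mock(
#         name="describe_hyper_parameter_tuning_job",
#         return_value=describe_tuning_result,
#     )
#     client.list_training_jobs_for_hyper_parameter_tuning_job = Mock(
#         name="list_training_jobs_for_hyper_parameter_tuning_job",
#         return_value=list_training_results,
#     )
#     return Mock(name="sagemaker_session", sagemaker_client=client)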