from sagemaker.analytics import HyperparameterTuningJobAnalytics


def test_description():
    # create_sagemaker_session is a test helper that returns a mocked
    # Session object (a sketch of it is given after the test).
    session = create_sagemaker_session(describe_tuning_result={
        'HyperParameterTuningJobConfig': {
            'ParameterRanges': {
                'CategoricalParameterRanges': [],
                'ContinuousParameterRanges': [
                    {'MaxValue': '1', 'MinValue': '0', 'Name': 'eta'},
                    {'MaxValue': '10', 'MinValue': '0', 'Name': 'gamma'},
                ],
                'IntegerParameterRanges': [
                    {'MaxValue': '30', 'MinValue': '5', 'Name': 'num_layers'},
                    {'MaxValue': '100', 'MinValue': '50', 'Name': 'iterations'},
                ],
            },
        },
    })
    tuner = HyperparameterTuningJobAnalytics("my-tuning-job", sagemaker_session=session)

    # The first call hits the API once and caches the result.
    d = tuner.description()
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 1
    assert d is not None
    assert d['HyperParameterTuningJobConfig'] is not None

    # Clearing the cache forces the next call to hit the API again.
    tuner.clear_cache()
    d = tuner.description()
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 2

    # A repeated call is served from the cache; no new API call is made.
    d = tuner.description()
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 2

    # force_refresh=True bypasses the cache and re-fetches the description.
    d = tuner.description(force_refresh=True)
    assert len(session.sagemaker_client.describe_hyper_parameter_tuning_job.mock_calls) == 3

    # Check that the ranges are flattened into a single dict: two continuous
    # parameters (eta, gamma) plus two integer ones (num_layers, iterations).
    r = tuner.tuning_ranges
    assert len(r) == 4
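
The test depends on a create_sagemaker_session helper defined elsewhere in the test module. A minimal sketch of such a helper, assuming it only needs to stub describe_hyper_parameter_tuning_job on a mocked session (the real helper in the SDK's test suite may wire up more client methods):

from unittest.mock import Mock


def create_sagemaker_session(describe_tuning_result=None):
    # Hypothetical sketch: a Mock stands in for sagemaker.session.Session,
    # and its client replays the canned describe response on every call.
    session = Mock(name="sagemaker_session")
    session.sagemaker_client.describe_hyper_parameter_tuning_job = Mock(
        name="describe_hyper_parameter_tuning_job",
        return_value=describe_tuning_result,
    )
    return session

Because mock_calls records every invocation of the stubbed client method, the test can count API round-trips and thereby verify the caching behavior of description().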