Code example #1
0
def test_hpo_for_builtin():
    """A SagemakerHPOTask wrapping a builtin-algorithm trainer exposes the
    expected typed interface, serializes its custom config correctly, and
    raises NotImplementedError when invoked locally.
    """
    resource_cfg = TrainingJobResourceConfig(
        instance_count=1,
        instance_type="ml-xlarge",
        volume_size_in_gb=1,
    )
    builtin_trainer = SagemakerBuiltinAlgorithmsTask(
        name="builtin-trainer",
        task_config=SagemakerTrainingJobConfig(
            training_job_resource_config=resource_cfg,
            algorithm_specification=AlgorithmSpecification(
                algorithm_name=AlgorithmName.XGBOOST,
            ),
        ),
    )

    hpo_task = SagemakerHPOTask(
        name="test",
        task_config=HPOJob(10, 10, ["x"]),
        training_task=builtin_trainer,
    )

    # The HPO task's interface is the trainer's inputs plus the tuning
    # config plus each tunable hyperparameter ("x" here).
    expected_inputs = {
        "static_hyperparameters",
        "train",
        "validation",
        "hyperparameter_tuning_job_config",
        "x",
    }
    assert hpo_task.python_interface.inputs.keys() == expected_inputs
    assert hpo_task.python_interface.outputs.keys() == {"model"}

    expected_custom = {
        "maxNumberOfTrainingJobs": "10",
        "maxParallelTrainingJobs": "10",
        "trainingJob": {
            "algorithmSpecification": {"algorithmName": "XGBOOST"},
            "trainingJobResourceConfig": {
                "instanceCount": "1",
                "instanceType": "ml-xlarge",
                "volumeSizeInGb": "1",
            },
        },
    }
    assert hpo_task.get_custom(_get_reg_settings()) == expected_custom

    # Local execution of an HPO task is not supported.
    tuning_cfg = HyperparameterTuningJobConfig(
        tuning_strategy=1,
        tuning_objective=HyperparameterTuningObjective(
            objective_type=HyperparameterTuningObjectiveType.MINIMIZE,
            metric_name="x",
        ),
        training_job_early_stopping_type=TrainingJobEarlyStoppingType.OFF,
    )
    with pytest.raises(NotImplementedError):
        hpo_task(
            static_hyperparameters={},
            train="",
            validation="",
            hyperparameter_tuning_job_config=tuning_cfg,
            x=ParameterRangeOneOf(param=IntegerParameterRange(10, 1, 1)),
        )
Code example #2
0
File: hpo.py — Project: xquek-fn/flytekit
 def to_literal(
     self,
     ctx: FlyteContext,
     python_val: _params.ParameterRangeOneOf,
     python_type: Type[_params.ParameterRangeOneOf],
     expected: LiteralType,
 ) -> Literal:
     """Serialize a ParameterRangeOneOf into a generic (protobuf Struct) Literal.

     The value's flyte IDL representation is converted to a plain dict with
     MessageToDict and wrapped as a generic literal via DictTransformer.

     :param ctx: flyte context (unused here, required by the transformer API).
     :param python_val: the ParameterRangeOneOf instance to serialize.
     :param python_type: the declared python type; corrected from
         Type[_hpo_job_model.HyperparameterTuningJobConfig] to match
         ``python_val`` — this transformer handles ParameterRangeOneOf.
     :param expected: the expected LiteralType (unused; the output is always
         a generic literal).
     :return: a Literal whose scalar holds the generic struct.
     """
     d = MessageToDict(python_val.to_flyte_idl())
     return DictTransformer.dict_to_generic_literal(d)
Code example #3
0
def test_parameter_ranges_transformer():
    """A ParameterRangeOneOf round-trips through ParameterRangesTransformer
    as a generic literal and compares equal to the original.
    """
    transformer = ParameterRangesTransformer()
    assert transformer.get_literal_type(ParameterRangeOneOf) == Generic.to_flyte_literal_type()

    original = ParameterRangeOneOf(param=IntegerParameterRange(10, 0, 1))
    ctx = FlyteContext.current_context()
    literal = transformer.to_literal(ctx, python_val=original, python_type=ParameterRangeOneOf, expected=None)
    assert literal is not None
    # The value must be stored as a generic (struct) scalar.
    assert literal.scalar.generic is not None

    restored = transformer.to_python_value(ctx, literal, ParameterRangeOneOf)
    assert restored is not None
    assert restored == original
Code example #4
0
    class MyWf(object):
        """Workflow class (legacy flytekit class-based API) that feeds train /
        validation blobs and hyperparameters into an XGBoost HPO job task.

        NOTE(review): attribute names and their order are the workflow's
        interface in this API style — do not rename or reorder them.
        """

        # Workflow inputs: training/validation data as blobs, plus the fixed
        # (non-tuned) hyperparameters as a generic struct.
        train_dataset = Input(Types.Blob)
        validation_dataset = Input(Types.Blob)
        static_hyperparameters = Input(Types.Generic)
        # Tuning-job configuration with a default: Bayesian search minimizing
        # "validation:error", early stopping left on AUTO.
        hyperparameter_tuning_job_config = Input(
            HyperparameterTuningJobConfig,
            default=_HyperparameterTuningJobConfig(
                tuning_strategy=HyperparameterTuningStrategy.BAYESIAN,
                tuning_objective=HyperparameterTuningObjective(
                    objective_type=HyperparameterTuningObjectiveType.MINIMIZE,
                    metric_name="validation:error",
                ),
                training_job_early_stopping_type=TrainingJobEarlyStoppingType.
                AUTO,
            ),
        )

        # Invoke the HPO task node, wiring the inputs above and declaring the
        # searchable ranges for three XGBoost hyperparameters.
        # presumably simple_xgboost_hpo_job_task is defined elsewhere in the
        # module — verify against the surrounding file.
        a = simple_xgboost_hpo_job_task(
            train=train_dataset,
            validation=validation_dataset,
            static_hyperparameters=static_hyperparameters,
            hyperparameter_tuning_job_config=hyperparameter_tuning_job_config,
            # num_round searched linearly over the integers 3..10.
            num_round=ParameterRangeOneOf(
                IntegerParameterRange(
                    min_value=3,
                    max_value=10,
                    scaling_type=HyperparameterScalingType.LINEAR)),
            # max_depth searched linearly over the integers 5..7.
            max_depth=ParameterRangeOneOf(
                IntegerParameterRange(
                    min_value=5,
                    max_value=7,
                    scaling_type=HyperparameterScalingType.LINEAR)),
            # gamma searched linearly over the continuous range 0.0..0.3.
            gamma=ParameterRangeOneOf(
                ContinuousParameterRange(
                    min_value=0.0,
                    max_value=0.3,
                    scaling_type=HyperparameterScalingType.LINEAR)),
        )