Example #1
        utility_function_implementation=ConfidenceBoundUtilityFunction.__name__,
        numeric_optimizer_implementation=RandomSearchOptimizer.__name__,
        confidence_bound_utility_function_config=confidence_bound_utility_function_config_store.default,
        random_search_optimizer_config=random_search_optimizer_config_store.default,
        fraction_random_suggestions=0.5))

experiment_designer_config_store.add_config_by_name(
    config_name="default_glow_worm_config",
    config_point=Point(
        utility_function_implementation=ConfidenceBoundUtilityFunction.__name__,
        numeric_optimizer_implementation=GlowWormSwarmOptimizer.__name__,
        confidence_bound_utility_function_config=confidence_bound_utility_function_config_store.default,
        glow_worm_swarm_optimizer_config=glow_worm_swarm_optimizer_config_store.default,
        fraction_random_suggestions=0.5),
    description="Experiment designer config with glow worm swarm optimizer as a utility function optimizer."
)

experiment_designer_config_store.add_config_by_name(
    config_name="default_multi_objective_config",
    config_point=Point(
        utility_function_implementation=MultiObjectiveProbabilityOfImprovementUtilityFunction.__name__,
        numeric_optimizer_implementation=RandomSearchOptimizer.__name__,
        multi_objective_probability_of_improvement_config=multi_objective_probability_of_improvement_utility_function_config_store.default,
        random_search_optimizer_config=random_search_optimizer_config_store.default,
        fraction_random_suggestions=0.5))

# The original snippet jumps here into the tail of the bayesian_optimizer_config_store
# definition; the head of its parameter space is inferred from the joins and the
# default Point below, so treat the exact names and bounds as reconstructed.
bayesian_optimizer_config_store = ComponentConfigStore(
    parameter_space=SimpleHypergrid(
        name="bayesian_optimizer_config",
        dimensions=[
            CategoricalDimension(name="surrogate_model_implementation",
                                 values=[HomogeneousRandomForestRegressionModel.__name__]),
            CategoricalDimension(name="experiment_designer_implementation",
                                 values=[ExperimentDesigner.__name__]),
            DiscreteDimension(name="min_samples_required_for_guided_design_of_experiments",
                              min=2,
                              max=10000)
        ]).join(
            subgrid=homogeneous_random_forest_config_store.parameter_space,
            on_external_dimension=CategoricalDimension(
                name="surrogate_model_implementation",
                values=[
                    HomogeneousRandomForestRegressionModel.__name__
                ])).join(
                    subgrid=experiment_designer_config_store.parameter_space,
                    on_external_dimension=CategoricalDimension(
                        name="experiment_designer_implementation",
                        values=[ExperimentDesigner.__name__])),
    default=Point(
        surrogate_model_implementation=HomogeneousRandomForestRegressionModel.__name__,
        experiment_designer_implementation=ExperimentDesigner.__name__,
        min_samples_required_for_guided_design_of_experiments=10,
        homogeneous_random_forest_regression_model_config=homogeneous_random_forest_config_store.default,
        experiment_designer_config=experiment_designer_config_store.default),
    description="TODO")

# Add a config with homogeneous random forest where the decision trees refit for every new observation.
#
optimizer_config = bayesian_optimizer_config_store.default
optimizer_config.homogeneous_random_forest_regression_model_config.decision_tree_regression_model_config.n_new_samples_before_refit = 1
optimizer_config.homogeneous_random_forest_regression_model_config.n_estimators = 50
bayesian_optimizer_config_store.add_config_by_name(
    config_name='default_refit_tree_every_time', config_point=optimizer_config)
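A config registered under a name can be fetched back later with the store's get_config_by_name accessor, the same one used elsewhere in this file. A minimal usage sketch (it assumes the store hands back the registered Point unmodified):

# Fetch the tweaked config by the name it was registered under.
refit_config = bayesian_optimizer_config_store.get_config_by_name('default_refit_tree_every_time')
assert refit_config.homogeneous_random_forest_regression_model_config.n_estimators == 50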

# Add a default config with glowworm swarm optimizer
#
bayesian_optimizer_config_store.add_config_by_name(
    config_name="default_with_glow_worm",
    config_point=Point(
        surrogate_model_implementation=HomogeneousRandomForestRegressionModel.
        __name__,
        experiment_designer_implementation=ExperimentDesigner.__name__,
        min_samples_required_for_guided_design_of_experiments=10,
        homogeneous_random_forest_regression_model_config=
        homogeneous_random_forest_config_store.default,
        experiment_designer_config=experiment_designer_config_store.
        get_config_by_name("default_glow_worm_config")))
Example #4
        utility_function_implementation=ConfidenceBoundUtilityFunction.__name__,
        numeric_optimizer_implementation=RandomSearchOptimizer.__name__,
        confidence_bound_utility_function_config=confidence_bound_utility_function_config_store.default,
        random_search_optimizer_config=random_search_optimizer_config_store.default,
        fraction_random_suggestions=0.5))

experiment_designer_config_store.add_config_by_name(
    config_name="default_glow_worm_config",
    config_point=Point(
        utility_function_implementation=ConfidenceBoundUtilityFunction.__name__,
        numeric_optimizer_implementation=GlowWormSwarmOptimizer.__name__,
        confidence_bound_utility_function_config=confidence_bound_utility_function_config_store.default,
        glow_worm_swarm_optimizer_config=glow_worm_swarm_optimizer_config_store.default,
        fraction_random_suggestions=0.5),
    description="Experiment designer config with glow worm swarm optimizer as a utility function optimizer."
)


class ExperimentDesigner:
    """ Portion of a BayesianOptimizer concerned with Design of Experiments.

    The two main components of a Bayesian Optimizer are:
    * the surrogate model - responsible for fitting a regression function that predicts some performance metric(s)
        from a suggested config and context information
    * the experiment designer - responsible for suggesting the next configuration to try against the real system.
    """
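The division of labor in this docstring can be sketched as a suggest loop: the designer either explores at random or asks its numeric optimizer to maximize a utility function built on the surrogate model's predictions. A minimal sketch under assumed interfaces; model, utility_function, optimizer, and parameter_space are hypothetical stand-ins, not the actual MLOS classes:

import random

def suggest(model, utility_function, optimizer, parameter_space,
            fraction_random_suggestions=0.5):
    # Explore: a fraction of suggestions are random draws from the search
    # space, mirroring fraction_random_suggestions in the configs above.
    if not model.is_fitted or random.random() < fraction_random_suggestions:
        return parameter_space.random()
    # Exploit: maximize the utility function (e.g. a confidence bound on the
    # surrogate's predictions) using the configured numeric optimizer.
    return optimizer.maximize(utility_function)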
Example #5
    ),
    default=Point(
        uncertainty_type="constant",
        use_objective_function=True,
        predicted_value_degrees_of_freedom=10,
        constant_uncertainty_config=Point(value=1),
        objective_function_config=objective_function_config_store.get_config_by_name("three_level_quadratic")
    ),
    description=""
)

multi_objective_pass_through_model_config_store.add_config_by_name(
    config_name="three_level_quadratic",
    config_point=Point(
        uncertainty_type="constant",
        use_objective_function=True,
        predicted_value_degrees_of_freedom=10,
        constant_uncertainty_config=Point(value=1),
        objective_function_config=objective_function_config_store.get_config_by_name("three_level_quadratic")
    )
)

multi_objective_pass_through_model_config_store.add_config_by_name(
    config_name="multi_objective_waves_3_params_2_objectives_half_pi_phase_difference",
    config_point=Point(
        uncertainty_type="coefficient_of_variation",
        use_objective_function=True,
        predicted_value_degrees_of_freedom=10,
        coefficient_of_variation_config=Point(value=0.1),
        objective_function_config=objective_function_config_store.get_config_by_name("multi_objective_waves_3_params_2_objectives_half_pi_phase_difference")
    )
)
Example #6
    * velocity_convergence_threshold - when an incumbent's velocity drops below this threshold, it is assumed to have converged.
    * max_num_iterations - cap on the number of iterations. A failsafe - should be higher than what the algorithm needs to converge on average.
    * num_neighbors - how many random neighbors to generate for each incumbent?
    * num_cached_good_params - how many good configurations should this optimizer cache for future use?
    * initial_points_pareto_weight - what proportion of initial points should come from the pareto frontier?
    * initial_points_cached_good_params_weight - what proportion of initial points should come from the good params cache?
    * initial_points_random_params_weight - what proportion of initial points should be randomly generated?
    """)

random_near_incumbent_optimizer_config_store.add_config_by_name(
    config_name="20_incumbents_50_neighbors",
    config_point=Point(num_starting_configs=20,
                       initial_velocity=0.2,
                       velocity_update_constant=0.3,
                       velocity_convergence_threshold=0.01,
                       max_num_iterations=15,
                       num_neighbors=50,
                       num_cached_good_params=2**10,
                       initial_points_pareto_weight=0.5,
                       initial_points_cached_good_params_weight=0.3,
                       initial_points_random_params_weight=0.2),
    description="More thorough and more expensive than the default.")


class RandomNearIncumbentOptimizer(UtilityFunctionOptimizer):
    """ Searches the utility function for maxima using the random near incumbent strategy.

        Starting from an incumbent configuration, this optimizer creates a 'cloud' of random points in the incumbent's vicinity
    and evaluates the utility function for each of these points. If any of the new points has a higher utility value than the
    incumbent, it is promoted to incumbent and the process repeats.
    """
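The promotion loop the docstring describes can be sketched as a simple hill climb; utility and random_neighbor below are hypothetical stand-ins for the optimizer's internals (a utility function evaluation and a draw from the incumbent's neighborhood):

def random_near_incumbent_search(utility, incumbent, random_neighbor,
                                 num_neighbors=50, max_num_iterations=15):
    # utility(config) scores a configuration; random_neighbor(config) draws a
    # random configuration near it.
    best_value = utility(incumbent)
    for _ in range(max_num_iterations):
        # Generate a 'cloud' of random points in the incumbent's vicinity.
        neighbors = [random_neighbor(incumbent) for _ in range(num_neighbors)]
        candidate = max(neighbors, key=utility)
        candidate_value = utility(candidate)
        if candidate_value <= best_value:
            break  # no neighbor improved on the incumbent
        # Promote the best neighbor to incumbent and repeat the process.
        incumbent, best_value = candidate, candidate_value
    return incumbent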