# Example #1
"""
Example of using the mixture backend's remove_erroring_algs feature.

To run this example, just run:
    > bbopt ./remove_erroring_algs_example.py
"""

# BBopt setup: build the optimizer and, when run as a script, start the
# mixture backend with error-prone algorithms pruned automatically.
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
    # Weight gaussian_process infinitely so it is always preferred; with
    # remove_erroring_algs=True, it falls back to TPE if it errors out.
    alg_distribution = [
        ("gaussian_process", float("inf")),
        ("tree_structured_parzen_estimator", 1),
    ]
    bb.run_backend(
        "mixture",
        distribution=alg_distribution,
        remove_erroring_algs=True,
    )

# Parameters supported by the skopt-based algorithms.
x0 = bb.randint("x0", 1, 10, guess=5)
x1 = bb.choice("x1", [-10, -1, 0, 1, 10])

# A parameter only the hyperopt-based algorithms can handle; requesting it
# makes gaussian_process error, exercising remove_erroring_algs.
x2 = bb.normalvariate("x2", mu=0, sigma=1)

# When actually optimizing (not just serving stored values), confirm the
# mixture backend pruned gaussian_process and fell back to TPE.
if not bb.is_serving:
    chosen_alg = bb.backend.selected_alg
    assert chosen_alg == "tree_structured_parzen_estimator", chosen_alg

# Set the goal.
# Example #2
"""
Example using a mixture distribution over many different possible algorithms.

To run this example, just run:
    > bbopt ./mixture_example.py
"""

# BBopt setup: construct the optimizer and, when run as a script, launch the
# mixture backend with a uniform distribution over all supported algorithms.
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
    # Equal weight for every algorithm in the mixture.
    alg_distribution = [
        ("random", 1),
        ("tree_structured_parzen_estimator", 1),
        ("annealing", 1),
        ("gaussian_process", 1),
        ("random_forest", 1),
        ("extra_trees", 1),
        ("gradient_boosted_regression_trees", 1),
    ]
    bb.run_backend("mixture", alg_distribution)

# When the active backend is the mixture backend (i.e. we are optimizing
# rather than serving stored values), record which algorithm it picked.
from bbopt.backends.mixture import MixtureBackend
if isinstance(bb.backend, MixtureBackend):
    chosen_alg = bb.backend.selected_alg
    bb.remember({
        "alg": chosen_alg,
    })

# Define a parameter: a 5-element random sample drawn from 0..9.
xs = bb.sample("xs", range(10), 5, guess=[3, 4, 5, 6, 7])