def test_all_points_different(strategy, surrogate):
    """
    Tests whether the parallel optimizer always generates
    different points to evaluate.

    Parameters
    ----------
    * `strategy` [string]:
        Name of the strategy to use during optimization.

    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    """
    optimizer = Optimizer(
        base_estimator=surrogate(),
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_optimizer='sampling',
        random_state=1
    )

    tolerance = 1e-3  # distance below which two points are considered the same
    for i in range(n_steps):
        x = optimizer.ask(n_points, strategy)
        optimizer.tell(x, [branin(v) for v in x])
        distances = pdist(x)
        assert all(distances > tolerance)
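
# The parallel-optimizer tests above and below take `strategy` and
# `surrogate` as arguments and reference the module-level constants
# `n_steps` and `n_points`. A minimal sketch of that setup, assuming
# scikit-optimize; the parameter lists, constant values, and the
# function name `test_parametrization_sketch` are illustrative, not
# the library's exact test configuration.
import pytest
from scipy.spatial.distance import pdist
from skopt import Optimizer
from skopt.benchmarks import branin
from skopt.learning import (ExtraTreesRegressor,
                            GaussianProcessRegressor,
                            RandomForestRegressor)
from skopt.space import Real

supported_strategies = ["cl_min", "cl_mean", "cl_max"]
available_surrogates = [GaussianProcessRegressor, RandomForestRegressor,
                        ExtraTreesRegressor]
n_steps = 5    # ask/tell iterations per test
n_points = 4   # batch size requested from each ask()


@pytest.mark.parametrize("strategy", supported_strategies)
@pytest.mark.parametrize("surrogate", available_surrogates)
def test_parametrization_sketch(strategy, surrogate):
    ...
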
def test_reproducible_runs(strategy, surrogate):
    # two runs of the optimizer should yield exactly the same results

    optimizer = Optimizer(
        base_estimator=surrogate(random_state=1),
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_optimizer='sampling',
        random_state=1
    )

    points = []
    for i in range(n_steps):
        x = optimizer.ask(n_points, strategy)
        points.append(x)
        optimizer.tell(x, [branin(v) for v in x])

    # the x's should be exactly the same as those in `points`
    optimizer = Optimizer(
        base_estimator=surrogate(random_state=1),
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_optimizer='sampling',
        random_state=1
    )
    for i in range(n_steps):
        x = optimizer.ask(n_points, strategy)

        assert points[i] == x

        optimizer.tell(x, [branin(v) for v in x])

def test_same_set_of_points_ask(strategy, surrogate):
    """
    For n_points not None, tests whether two consecutive calls to ask
    return the same sets of points.

    Parameters
    ----------
    * `strategy` [string]:
        Name of the strategy to use during optimization.

    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    """

    optimizer = Optimizer(
        base_estimator=surrogate(),
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_optimizer='sampling',
        random_state=2
    )

    for i in range(n_steps):
        xa = optimizer.ask(n_points, strategy)
        xb = optimizer.ask(n_points, strategy)
        optimizer.tell(xa, [branin(v) for v in xa])
        assert_equal(xa, xb)  # check if the sets of points generated are equal

def test_get_constraints():
    # Test that get_constraints() returns the constraints that were set
    # by set_constraints()
    space = Space([Real(1, 10)])
    cons_list = [Single(0, 5.0, 'real')]
    cons = Constraints(cons_list, space)
    opt = Optimizer(space, "ET")
    opt.set_constraints(cons)
    assert_equal(cons, opt.get_constraints())
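
# A condensed illustration of the constraint API these tests exercise.
# Constraints/Single come from ProcessOptimizer (a scikit-optimize fork);
# the import paths below are an assumption about that package's layout.
from ProcessOptimizer import Optimizer, Space
from ProcessOptimizer.space import Real
from ProcessOptimizer.space.constraints import Constraints, Single

space = Space([Real(1, 10)])
# Single(0, 5.0, 'real') pins dimension 0 to the value 5.0
cons = Constraints([Single(0, 5.0, 'real')], space)
opt = Optimizer(space, "ET")
opt.set_constraints(cons)
assert opt.ask()[0] == 5.0  # every suggestion satisfies the constraint
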
def test_purely_categorical_space():
    # Test reproduces the bug in #908, make sure it doesn't come back
    dims = [Categorical(['a', 'b', 'c']), Categorical(['A', 'B', 'C'])]
    optimizer = Optimizer(dims, n_initial_points=1, random_state=3)

    x = optimizer.ask()
    # before the fix this call raised an exception
    optimizer.tell(x, 1.)
def test_dump_and_load_optimizer():
    base_estimator = ExtraTreesRegressor(random_state=2)
    opt = Optimizer([(-2.0, 2.0)],
                    base_estimator,
                    n_random_starts=1,
                    acq_optimizer="sampling")

    opt.run(bench1, n_iter=3)

    with tempfile.TemporaryFile() as f:
        dump(opt, f)
        f.seek(0)
        load(f)
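
# `dump` and `load` are scikit-optimize's persistence helpers (thin
# wrappers around joblib); they also accept file paths. A short
# self-contained sketch, with an illustrative filename:
from skopt import Optimizer, dump, load
from skopt.benchmarks import bench1

opt = Optimizer([(-2.0, 2.0)], "ET", n_initial_points=1,
                acq_optimizer="sampling")
opt.run(bench1, n_iter=3)
dump(opt, "optimizer.pkl")            # persist the optimizer to disk
opt_restored = load("optimizer.pkl")  # ...and restore it later
opt_restored.run(bench1, n_iter=1)    # the restored optimizer is usable
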
def run(body) -> dict:
    """Executes the ProcessOptimizer
    
    Returns
    -------
    dict
        a JSON encodable dictionary representation of the result.
    """
    # print("Receive: " + str(body))
    data = [(run["xi"], run["yi"]) for run in body["data"]]
    cfg = body["optimizerConfig"]
    extras = {}
    if ("extras" in body):
        extras = body["extras"]
    # print("Received extras " + str(extras))
    space = [(convertNumberType(x["from"], x["type"]), convertNumberType(x["to"], x["type"])) if (x["type"] == "discrete" or x["type"] == "continuous") else tuple(x["categories"]) for x in cfg["space"]]
    dimensions = [x["name"] for x in cfg["space"]]
    hyperparams = {
        'base_estimator': cfg["baseEstimator"],
        'acq_func': cfg["acqFunc"],
        'n_initial_points': cfg["initialPoints"],
        'acq_func_kwargs': {'kappa': cfg["kappa"], 'xi': cfg["xi"]}
    }
    optimizer = Optimizer(space, **hyperparams)

    Xi = []
    Yi = []
    if data:
        Xi, Yi = map(list, zip(*data))
        result = optimizer.tell(Xi, Yi)
    else:
        result = {}
    
    response = processResult(result, optimizer, dimensions, cfg, extras, data, space)
    
    response["result"]["extras"]["parameters"] = {
        "dimensions": dimensions,
        "space": space,
        "hyperparams": hyperparams,
        "Xi": Xi,
        "Yi": Yi,
        "extras": extras
    }

    # It is necessary to convert the response to a JSON string and back to
    # a dictionary because NumPy types are not serializable by default
    return json.loads(json_tricks.dumps(response))
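
# An illustrative request body for run(), inferred from the keys the
# function reads above; the concrete names and values are made up.
example_body = {
    "data": [
        {"xi": [30, "cheese"], "yi": 4.2},   # one prior observation
        {"xi": [90, "chicken"], "yi": 2.7},
    ],
    "optimizerConfig": {
        "space": [
            {"name": "time", "type": "discrete", "from": 10, "to": 120},
            # any type other than discrete/continuous is treated as
            # categorical and read from "categories"
            {"name": "ingredient", "type": "category",
             "categories": ["cheese", "chicken"]},
        ],
        "baseEstimator": "GP",
        "acqFunc": "gp_hedge",
        "initialPoints": 3,
        "kappa": 1.96,
        "xi": 0.01,
    },
    "extras": {},
}
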
def test_constant_liar_runs(strategy, surrogate, acq_func):
    """
    Tests whether the optimizer runs properly during the random
    initialization phase and beyond

    Parameters
    ----------
    * `strategy` [string]:
        Name of the strategy to use during optimization.

    * `surrogate` [scikit-optimize surrogate class]:
        A class of the scikit-optimize surrogate used in Optimizer.
    """
    optimizer = Optimizer(
        base_estimator=surrogate(),
        dimensions=[Real(-5.0, 10.0), Real(0.0, 15.0)],
        acq_func=acq_func,
        acq_optimizer='sampling',
        random_state=0
    )

    # test the argument checks; "cl_maen" is a deliberately misspelled
    # strategy name and must be rejected
    assert_raises(ValueError, optimizer.ask, n_points=2, strategy="cl_maen")
    # n_points must be a positive integer
    assert_raises(ValueError, optimizer.ask, n_points="0")
    assert_raises(ValueError, optimizer.ask, n_points=0)

    for i in range(n_steps):
        x = optimizer.ask(n_points=n_points, strategy=strategy)
        # check if actually n_points was generated
        assert_equal(len(x), n_points)

        if "ps" in acq_func:
            optimizer.tell(x, [[branin(v), 1.1] for v in x])
        else:
            optimizer.tell(x, [branin(v) for v in x])
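
# For context: the constant liar strategies ("cl_min", "cl_mean",
# "cl_max") impute a fake objective value (the min, mean, or max of the
# y values observed so far) for each pending point, so one model can
# propose a batch of distinct points. A minimal standalone use,
# assuming scikit-optimize's ask/tell API:
from skopt import Optimizer
from skopt.benchmarks import branin

opt = Optimizer([(-5.0, 10.0), (0.0, 15.0)], "GP",
                acq_optimizer="sampling", random_state=0)
batch = opt.ask(n_points=4, strategy="cl_min")  # four suggestions at once
opt.tell(batch, [branin(v) for v in batch])     # evaluate, then report back
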
def test_dict_list_space_representation():
    """
    Tests whether the conversion of the dictionary and list representation
    of a point from a search space works properly.
    """

    chef_space = {
        'Cooking time': (0, 1200),  # in minutes
        'Main ingredient':
        ['cheese', 'cherimoya', 'chicken', 'chard', 'chocolate', 'chicory'],
        'Secondary ingredient': ['love', 'passion', 'dedication'],
        'Cooking temperature': (-273.16, 10000.0)  # in Celsius
    }

    opt = Optimizer(dimensions=dimensions_aslist(chef_space))
    point = opt.ask()

    # check if the back transformed point and original one are equivalent
    assert_equal(point,
                 point_aslist(chef_space, point_asdict(chef_space, point)))
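
# For reference, point_asdict packs an ordered point into a dict keyed
# by dimension name and point_aslist inverts it (dimensions are taken
# in sorted-key order). A small illustration with made-up values:
from skopt.utils import point_asdict, point_aslist

space = {'Cooking time': (0, 1200), 'Main ingredient': ['cheese', 'chicken']}
point = [90, 'cheese']
as_dict = point_asdict(space, point)
# -> {'Cooking time': 90, 'Main ingredient': 'cheese'}
assert point_aslist(space, as_dict) == point
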
def test_lhs_with_constraints():
    # Test that constraints cannot be set while LHS initial points are
    # still pending, and that they can be set once those points are
    # exhausted.
    space = Space([Real(1, 10)])
    cons_list = [Single(0, 5.0, 'real')]
    cons = Constraints(cons_list, space)
    opt = Optimizer(space, "ET", lhs=True, n_initial_points=2)
    opt.tell([1], 0)  # Use one initial point so that one is still left
    with raises(RuntimeError):  # Setting constraints now must fail
        opt.set_constraints(cons)
    opt = Optimizer(space, "ET", lhs=True, n_initial_points=2)
    opt.tell([[1], [1]], [0, 0])  # Use up both initial points
    opt.set_constraints(cons)  # Now setting constraints should succeed

def test_optimizer_with_constraints(acq_optimizer):
    base_estimator = 'GP'
    space = Space([
        Real(1, 10),
        Real(1, 10),
        Real(1, 10),
        Integer(0, 10),
        Integer(0, 10),
        Integer(0, 10),
        Categorical(list('abcdefg')),
        Categorical(list('abcdefg')),
        Categorical(list('abcdefg'))
    ])

    cons_list = [Single(0, 5.0, 'real'), Single(3, 5, 'integer')]
    cons_list_2 = [Single(0, 4.0, 'real'), Single(3, 4, 'integer')]
    cons = Constraints(cons_list, space)
    cons_2 = Constraints(cons_list_2, space)

    # Test behavior when not adding constraints
    opt = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                    n_initial_points=5)

    # Test that constraints are None when no constraint has been set so far
    assert_equal(opt._constraints, None)

    # Test that constraints are still None after asking and telling
    for _ in range(6):
        next_x = opt.ask()
        f_val = np.random.random() * 100
        opt.tell(next_x, f_val)
    assert_equal(opt._constraints, None)
    opt.remove_constraints()
    assert_equal(opt._constraints, None)

    # Test behavior when adding constraints in an optimization setting
    opt = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                    n_initial_points=3)
    opt.set_constraints(cons)
    assert_equal(opt._constraints, cons)
    next_x = opt.ask()
    assert_equal(next_x[0], 5.0)
    assert_equal(next_x[3], 5)
    f_val = np.random.random() * 100
    opt.tell(next_x, f_val)
    assert_equal(opt._constraints, cons)
    opt.set_constraints(cons_2)
    next_x = opt.ask()
    assert_equal(opt._constraints, cons_2)
    assert_equal(next_x[0], 4.0)
    assert_equal(next_x[3], 4)
    f_val = np.random.random() * 100
    opt.tell(next_x, f_val)
    assert_equal(opt._constraints, cons_2)
    opt.remove_constraints()
    assert_equal(opt._constraints, None)
    next_x = opt.ask()
    assert_not_equal(next_x[0], 4.0)
    assert_not_equal(next_x[0], 5.0)
    f_val = np.random.random() * 100
    opt.tell(next_x, f_val)
    assert_equal(opt._constraints, None)

    # Test that next_x is changed when adding constraints
    opt = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                    n_initial_points=3)
    assert_false(hasattr(opt, '_next_x'))
    for _ in range(4):  # Exhaust the initial points
        next_x = opt.ask()
        f_val = np.random.random() * 100
        opt.tell(next_x, f_val)
    assert_true(hasattr(opt, '_next_x'))  # Now _next_x should be set
    assert_not_equal(next_x[0], 4.0)
    assert_not_equal(next_x[0], 5.0)
    next_x = opt._next_x
    opt.set_constraints(cons)
    assert_not_equal(opt._next_x, next_x)  # Check that _next_x has changed
    assert_equal(opt._next_x[0], 5.0)
    assert_equal(opt._next_x[3], 5)
    next_x = opt._next_x
    opt.set_constraints(cons_2)
    assert_not_equal(opt._next_x, next_x)
    assert_equal(opt._next_x[0], 4.0)
    assert_equal(opt._next_x[3], 4)

    # Test that passing a constraint list or a Constraints object gives
    # the same result
    opt = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                    n_initial_points=3)
    opt.set_constraints(cons_list)
    opt2 = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                     n_initial_points=3)
    opt2.set_constraints(cons)
    assert_equal(opt._constraints, opt2._constraints)

    # Test that constraints are satisfied
    opt = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                    n_initial_points=2)
    opt.set_constraints(cons)
    next_x = opt.ask()
    assert_equal(next_x[0], 5.0)

    opt = Optimizer(space, base_estimator, acq_optimizer=acq_optimizer,
                    n_initial_points=2)
    next_x = opt.ask()
    assert_not_equal(next_x[0], 5.0)
    f_val = np.random.random() * 100
    opt.tell(next_x, f_val)
    opt.set_constraints(cons)
    next_x = opt.ask()
    assert_equal(next_x[0], 5.0)
    assert_equal(next_x[3], 5)
    opt.set_constraints(cons)
    next_x = opt.ask()
    f_val = np.random.random() * 100
    opt.tell(next_x, f_val)
    opt.set_constraints(cons_2)
    next_x = opt.ask()
    assert_equal(next_x[0], 4.0)
    assert_equal(next_x[3], 4)
    f_val = np.random.random() * 100
    opt.tell(next_x, f_val)
    assert_equal(next_x[0], 4.0)
    assert_equal(next_x[3], 4)
def test_n_random_starts_Optimizer():
    # n_random_starts got renamed in v0.4
    et = ExtraTreesRegressor(random_state=2)
    with pytest.deprecated_call():
        Optimizer([(0, 1.)], et, n_random_starts=10, acq_optimizer='sampling')
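
# For reference, n_random_starts was renamed to n_initial_points; the
# non-deprecated equivalent of the construction above would be:
#     Optimizer([(0, 1.)], et, n_initial_points=10,
#               acq_optimizer='sampling')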