Example #1
    # Excerpt from a Ray Tune test class; assumes the surrounding module does
    # "from nevergrad.optimization import optimizerlib" and imports
    # NevergradSearch from ray.tune.suggest.
    def set_basic_conf(self):
        instrumentation = 2  # search-space dimensionality (legacy nevergrad API)
        parameter_names = ["height", "width"]
        optimizer = optimizerlib.OnePlusOne(instrumentation)

        def cost(space, reporter):
            reporter(
                mean_loss=(space["height"] - 14)**2 - abs(space["width"] - 3))

        search_alg = NevergradSearch(
            optimizer, parameter_names, metric="mean_loss", mode="min")
        return search_alg, cost
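
A note on the wiring: NevergradSearch pairs the unnamed 2-dimensional optimizer with parameter_names to build the space dict handed to cost. A hypothetical illustration of that mapping (not Ray's actual code):

# Hypothetical illustration: a flat 2-dimensional suggestion from
# OnePlusOne is zipped with parameter_names to form the "space" dict
# that the cost function receives.
point = [0.0, 0.0]  # one candidate in the optimizer's 2-dimensional space
space = dict(zip(["height", "width"], point))
loss = (space["height"] - 14) ** 2 - abs(space["width"] - 3)  # 193.0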
Example #2
def test_torch_agent_function() -> None:
    mgame = envs.DoubleOSeven()
    game = mgame.with_agent(player_1=agents.RandomAgent(mgame)).as_single_agent()
    agent = agents.TorchAgent.from_module_maker(game, agents.DenseNet)
    runner = base.EnvironmentRunner(game)
    agentfunction = agents.TorchAgentFunction(agent, runner)
    instru = agentfunction.instrumentation  # legacy attribute (later renamed "parametrization")
    args, kwargs = instru.data_to_arguments([0] * instru.dimension)  # decode the zero vector
    assert not args
    value = agentfunction.compute(**kwargs)
    assert value in [0, 1]
    # optimization
    opt = optimizerlib.OnePlusOne(instru, budget=10)
    opt.minimize(agentfunction.compute)
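
Example #3 below is the same test written against a newer nevergrad API (roughly v0.4+), where the instrumentation attribute and data_to_arguments(...) were superseded by parametrization and spawn_child().set_standardized_data(...).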
Example #3
def test_torch_agent_function() -> None:
    mgame = envs.DoubleOSeven()
    game = mgame.with_agent(player_1=agents.RandomAgent(mgame)).as_single_agent()
    agent = agents.TorchAgent.from_module_maker(game, agents.DenseNet)
    runner = base.EnvironmentRunner(game)
    agentfunction = agents.TorchAgentFunction(agent, runner)
    instru = agentfunction.parametrization
    args, kwargs = instru.spawn_child().set_standardized_data([0] * instru.dimension).value
    assert not args
    value = agentfunction.compute(**kwargs)
    assert value in [0, -1]   # negated reward, for minimization
    # optimization
    opt = optimizerlib.OnePlusOne(instru, budget=10)
    opt.minimize(agentfunction.compute)
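
The final minimize call can equivalently be written as an explicit ask/tell loop, which helps when evaluations happen outside the optimizer's control. A minimal sketch against the same nevergrad API:

for _ in range(opt.budget):
    candidate = opt.ask()  # sample one candidate from the parametrization
    loss = agentfunction.compute(*candidate.args, **candidate.kwargs)
    opt.tell(candidate, loss)  # report the observed loss back to the optimizer
recommendation = opt.provide_recommendation()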
Example #4
    def set_basic_conf(self):
        instrumentation = 2  # a 2-dimensional search space, as in Example #1
        parameter_names = ["height", "width"]
        optimizer = optimizerlib.OnePlusOne(instrumentation)

        def cost(space, reporter):
            reporter(loss=(space["height"] - 14)**2 - abs(space["width"] - 3))

        search_alg = NevergradSearch(
            optimizer,
            parameter_names,
            metric="loss",
            mode="min",
            max_concurrent=1000,  # Here to avoid breaking back-compat.
        )
        return search_alg, cost
Example #5
# Excerpt from Ray's nevergrad example script; assumes ray, tune's run,
# AsyncHyperBandScheduler, NevergradSearch and easy_objective are defined above.
if __name__ == "__main__":
    import argparse
    from nevergrad.optimization import optimizerlib

    parser = argparse.ArgumentParser()
    parser.add_argument("--smoke-test",
                        action="store_true",
                        help="Finish quickly for testing")
    args, _ = parser.parse_known_args()
    ray.init()

    config = {
        "num_samples": 10 if args.smoke_test else 50,
        "config": {
            "iterations": 100,
        },
        "stop": {
            "timesteps_total": 100
        }
    }
    optimizer = optimizerlib.OnePlusOne(dimension=2)  # legacy nevergrad constructor
    algo = NevergradSearch(optimizer, ["height", "width"],
                           max_concurrent=4,
                           reward_attr="neg_mean_loss")  # deprecated Ray argument
    scheduler = AsyncHyperBandScheduler(reward_attr="neg_mean_loss")
    run(easy_objective,
        name="nevergrad",
        search_alg=algo,
        scheduler=scheduler,
        **config)
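
Example #7 shows this same driver updated for a later Ray Tune API, where the deprecated reward_attr="neg_mean_loss" argument is replaced by the metric="mean_loss", mode="min" pair on both the search algorithm and the scheduler.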
Example #6
from nevergrad.optimization import optimizerlib


def square(x):
    return (x - .5)**2


optimizer = optimizerlib.OnePlusOne(dimension=1, budget=100)  # legacy constructor
# alternatively, use optimizerlib.registry, a dict containing all optimizer classes
recommendation = optimizer.optimize(square, executor=None, batch_mode=True)  # later renamed minimize
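
For comparison, a minimal sketch of the same run under the current nevergrad API (assuming nevergrad >= 0.4, where dimension= became parametrization=, optimize was renamed minimize, and the objective must return a scalar, so square is adapted here as scalar_square):

import nevergrad as ng


def scalar_square(x):
    return float(((x - 0.5) ** 2).sum())  # scalar loss over the 1-D array x


optimizer = ng.optimizers.OnePlusOne(parametrization=1, budget=100)
recommendation = optimizer.minimize(scalar_square)
print(recommendation.value)  # an array close to [0.5]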
Example #7
    config = {
        "num_samples": 10 if args.smoke_test else 50,
        "config": {
            "iterations": 100,
        },
        "stop": {
            "timesteps_total": 100
        }
    }
    instrumentation = 2
    parameter_names = ["height", "width"]
    # With nevergrad v0.2.0+ the following is also possible:
    # from nevergrad import instrumentation as inst
    # instrumentation = inst.Instrumentation(
    #     height=inst.var.Array(1).bounded(0, 200).asfloat(),
    #     width=inst.var.OrderedDiscrete([0, 10, 20, 30, 40, 50]))
    # parameter_names = None  # names are provided by the instrumentation
    optimizer = optimizerlib.OnePlusOne(instrumentation)
    algo = NevergradSearch(optimizer,
                           parameter_names,
                           max_concurrent=4,
                           metric="mean_loss",
                           mode="min")
    scheduler = AsyncHyperBandScheduler(metric="mean_loss", mode="min")
    run(easy_objective,
        name="nevergrad",
        search_alg=algo,
        scheduler=scheduler,
        **config)
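
The Ray snippets above call easy_objective, which is defined elsewhere in the example script. A representative sketch, hypothetical but reconstructed from the config keys and the loss surface used in Examples #1 and #4:

def easy_objective(config, reporter):
    # Hypothetical reconstruction: report the height/width loss surface
    # once per iteration so timesteps_total can trigger the stop criterion.
    for i in range(config["iterations"]):
        reporter(timesteps_total=i,
                 mean_loss=(config["height"] - 14)**2 - abs(config["width"] - 3))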
Example #8
from nevergrad.instrumentation import FolderFunction
from nevergrad.optimization import optimizerlib
from concurrent import futures
folder = "/project/snoplus/machine_learning/hyperparameter"
command = ["python", "hyperparameter/ng_script.py"]  # command to run from just outside the provided folder
func = FolderFunction(folder, command, clean_copy=True)
print(func.dimension)  # prints the number of variables of the instrumented function
optimizer = optimizerlib.OnePlusOne(dimension=func.dimension,
                                    budget=50,
                                    num_workers=1)
recommendation = optimizer.optimize(func, executor=None, batch_mode=True)
print(func.get_summary(recommendation))
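
With num_workers=1 and executor=None the evaluations run sequentially, so the futures import above goes unused; it becomes useful once an actual executor is passed (see the sketch after Example #9).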
Example #9
from nevergrad.optimization import optimizerlib


def square(x):
    return (x - 0.75)**2


optimizer = optimizerlib.OnePlusOne(dimension=1, budget=100, num_workers=5)
recommendation = optimizer.optimize(square, executor=None, batch_mode=True)  # executor=None: sequential despite num_workers=5

print(recommendation)
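
Here num_workers=5 only pays off when an executor is supplied, so that batches of five evaluations can run concurrently. A minimal sketch under the same legacy API, assuming square is safe to call from worker threads:

from concurrent import futures

optimizer = optimizerlib.OnePlusOne(dimension=1, budget=100, num_workers=5)  # fresh budget
with futures.ThreadPoolExecutor(max_workers=5) as executor:
    recommendation = optimizer.optimize(square, executor=executor, batch_mode=True)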