Example #1
def main():

    instrum = ng.Instrumentation(alpha1=ng.var.Array(1).asscalar(),
                                 alpha2=ng.var.Array(1).asscalar(),
                                 beta1=ng.var.Array(1).asscalar(),
                                 beta2=ng.var.Array(1).asscalar())
    optimizer = ng.optimizers.CMA(instrumentation=instrum, budget=100)

    for i in range(optimizer.budget):
        try:
            x = optimizer.ask()
            value = score_ngca_algorithm_on_clover_data_by_kmeans(
                *x.args, **x.kwargs)
            print('{} out of {} - value {}'.format(i, optimizer.budget, value))
            optimizer.tell(x, value)
        except Exception:
            print('{} out of {} - error'.format(i, optimizer.budget))

    recommendation = optimizer.provide_recommendation()

    print('Optimal params:')
    print(recommendation.kwargs)

    print('Optimal Score on optimal params:')
    # score result
    score = score_ngca_algorithm_on_clover_data_by_kmeans(
        recommendation.kwargs['alpha1'], recommendation.kwargs['alpha2'],
        recommendation.kwargs['beta1'], recommendation.kwargs['beta2'])
    utilities.print_score_fixed(score)

    evaluate_test_data_by_kmeans(recommendation.kwargs)
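The loop above depends on project-specific scoring helpers. As a self-contained sketch of the same ask/tell pattern (assuming the legacy nevergrad 0.x Instrumentation API used throughout these examples; the quadratic objective is made up for illustration):

import nevergrad as ng

def toy_loss(alpha, beta):
    # hypothetical objective with its minimum at alpha=1, beta=-2
    return (alpha - 1.0) ** 2 + (beta + 2.0) ** 2

instrum = ng.Instrumentation(alpha=ng.var.Array(1).asscalar(),
                             beta=ng.var.Array(1).asscalar())
optimizer = ng.optimizers.OnePlusOne(instrumentation=instrum, budget=50)
for _ in range(optimizer.budget):
    candidate = optimizer.ask()          # draw a candidate point
    loss = toy_loss(*candidate.args, **candidate.kwargs)
    optimizer.tell(candidate, loss)      # report its loss back
recommendation = optimizer.provide_recommendation()
print(recommendation.kwargs)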
Example #2
    def __init__(self, offset):
        super().__init__(self.oracle_call, ng.Instrumentation(ng.var.Scalar()))
        self.register_initialization(offset=offset)  # to create equivalent instances through "copy"
        self.offset = offset
        # add your own function descriptors (from base class, we already get "dimension" etc...)
        # those will be recorded during benchmarks
        self._descriptors.update(offset=offset)
Example #3
def hparam_opt(config: dict, hparam_space: dict, backend: str,
               train_dataset=None, test_dataset=None,
               global_config: dict = global_config,
               optimizer_name: str = "OnePlusOne", budget: int = 20, mode="max"):
    """Conduct a hyperparameter space search."""
    
    if backend == "lightning":
        training_function = LightningTrainingContainer(config)
    else:
        training_function = TrainingContainer(config, train_dataset, test_dataset, global_config, backend, mode=mode)
    
    hparam_vars = {key: ng.instrumentation.var.OrderedDiscrete(value) for key, value in hparam_space.items()}
    instrumentation = ng.Instrumentation(**hparam_vars)
    print(instrumentation)
    
    optimizer = ng.optimizers.registry[optimizer_name](instrumentation=instrumentation, budget=budget)
    
    return optimizer.minimize(training_function)
Example #4
    def _assemble_instrumentation(self, parameters=None):
        """ Assemble nevergrad.Instrumentation object from `parameters` list.

        Parameters
        ----------
        parameters: List(BaseMCOParameter)
            parameter objects containing lower and upper numerical bounds

        Returns
        -------
        instrumentation: ng.Instrumentation
        """
        if parameters is None:
            parameters = self.parameters

        instrumentation = [
            self._create_instrumentation_variable(p) for p in parameters
        ]
        return ng.Instrumentation(*instrumentation)
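The helper above delegates to a project-specific _create_instrumentation_variable; a minimal sketch of the same idea with a hypothetical parameter class carrying lower/upper bounds (using the ng.var.Scalar().bounded(...) form seen in the other examples):

import nevergrad as ng

class BoundedParameter:
    # hypothetical stand-in for BaseMCOParameter
    def __init__(self, lower, upper):
        self.lower = lower
        self.upper = upper

def assemble_instrumentation(parameters):
    variables = [ng.var.Scalar().bounded(p.lower, p.upper) for p in parameters]
    return ng.Instrumentation(*variables)

instrumentation = assemble_instrumentation([BoundedParameter(0.0, 1.0),
                                            BoundedParameter(-5.0, 5.0)])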
Example #5
def find_detection_params_for_image(v):
    cache = {}

    def detection_loss(gauss, d_diam, e_diam, thresh):
        params = gauss, d_diam, e_diam, thresh

        if params in cache:
            return cache[params]

        image = do_detection(v, gauss, d_diam, e_diam, thresh)

        prop = regionprops(label(image))
        c1 = np.array([p.area for p in prop], dtype=np.float32)

        if len(c1) < 3:
            return np.inf

        # Detection loss

        ratio1 = -np.log(np.sum(v[image > 0.]) / np.sum(v))
        ratio2 = -np.log(
            np.sum(v[image > 0.]) / float(np.count_nonzero(image)))

        image_loss = ratio1 + ratio2

        cache[params] = image_loss

        return image_loss

    bounds = ng.Instrumentation(
        ng.var.Scalar(float).bounded(0., 10.),
        ng.var.Scalar(int).bounded(0, 10),
        ng.var.Scalar(int).bounded(0, 10),
        ng.var.Scalar(float).bounded(0., 1.))

    optimizer = ng.optimizers.PortfolioDiscreteOnePlusOne(
        instrumentation=bounds, budget=4000)

    recommendation = optimizer.minimize(detection_loss)
    opt_params = recommendation.args

    return opt_params
Example #6
def test_log_parameters(tmp_path: Path) -> None:
    filepath = tmp_path / "logs.txt"
    cases = [0, np.int(1), np.float(2.0), np.nan, float("inf"), np.inf]
    instrum = ng.Instrumentation(ng.var.Array(1),
                                 ng.var.Scalar(),
                                 blublu=ng.var.SoftmaxCategorical(cases),
                                 array=ng.var.Array(3, 2))
    optimizer = optimizerlib.OnePlusOne(instrumentation=instrum, budget=32)
    optimizer.register_callback("tell", callbacks.ParametersLogger(filepath, delete_existing_file=True))
    optimizer.minimize(_func, verbosity=2)
    # pickling
    logger = callbacks.ParametersLogger(filepath)
    logs = logger.load_flattened()
    assert len(logs) == 32
    assert isinstance(logs[-1]["#arg1"], float)
    assert len(logs[-1]) == 16
    logs = logger.load_flattened(max_list_elements=2)
    assert len(logs[-1]) == 12
    # deletion
    logger = callbacks.ParametersLogger(filepath, delete_existing_file=True)
    assert not logger.load()
Example #7
def main():

    # Optimize params on test and validation datasets
    instrum = ng.Instrumentation(
        alpha1=ng.var.Array(1).asscalar(),
        alpha2=ng.var.Array(1).asscalar(),
        beta1=ng.var.Array(1).bounded(a_min=0, a_max=constant.MAX_BETA_VALUE),
        beta2=ng.var.Array(1).bounded(a_min=0, a_max=constant.MAX_BETA_VALUE))
    optimizer = ng.optimizers.OnePlusOne(instrumentation=instrum, budget=100)
    # recommendation = optimizer.minimize(score_ngca_on_oil_data_by_kmeans)

    # ask and tell
    for i in range(optimizer.budget):
        try:
            print('{} out of {}'.format(i, optimizer.budget))
            x = optimizer.ask()
            value = score_ngca_on_oil_data_by_kmeans(*x.args, **x.kwargs)
            optimizer.tell(x, value)
        except Exception:
            print('Error')

    recommendation = optimizer.provide_recommendation()

    print('Optimal params:')
    print(recommendation.kwargs)

    print('Optimal Score on train and validation data:')
    # score result
    score = score_ngca_on_oil_data_by_kmeans(recommendation.kwargs['alpha1'],
                                             recommendation.kwargs['alpha2'],
                                             recommendation.kwargs['beta1'],
                                             recommendation.kwargs['beta2'])
    utilities.print_score_fixed(score)

    # Run algorithm with optimal params on test data and evaluate score
    # Plot projected test data on the result NG subspace
    evaluate_test_data_by_kmeans(recommendation.kwargs)

    return
Example #8
    def __init__(self, environment, random_seed, policy_factory,
                 create_rollout, optimizer, budget, low, high):
        self._environment = environment
        self._random_seed = random_seed
        self._policy_factory = policy_factory
        self._create_rollout = create_rollout
        self._budget = budget
        self._low = low
        self._high = high
        self._deterministic_update_policy = True

        self._observation_space = environment.observation_space
        self._action_space = environment.action_space
        self._policy = self._create_policy()
        self._policy_return = -np.inf
        self._policy_steps = -np.inf

        self._shape = self._policy.get_parameters()['model'].shape
        self._dims = np.prod(self._shape)
        instrumentation = ng.Instrumentation(
            ng.var.Array(self._dims).bounded(low, high))
        self._optimizer = optimizer(instrumentation=instrumentation,
                                    budget=budget)
Example #9
def main():
    cppn = get_model()
    cppn.summary()
    print("nevergrad optimizers:")
    print(list(sorted(ng.optimizers.registry.keys())))
    print("cppn weight shapes:")
    for weight in cppn.trainable_weights:
        print(weight.shape)
    weight_vars = [ng.var.Array(*tuple(weight.shape)).bounded(-10, 10)
                   for weight in cppn.trainable_weights]
    optimizer = ng.optimizers.registry["RotationInvariantDE"](
        instrumentation=ng.Instrumentation(*weight_vars))
    model_number = 0
    best_loss = 123456789.
    mnist = tfds.load(name="mnist", split=tfds.Split.TRAIN).repeat().shuffle(128).batch(BATCH_SIZE)
    max_batches = BATCHES_PER_TEST - 2
    while True:
        weights = optimizer.ask()
        cppn.set_weights([tf.convert_to_tensor(arg, dtype=tf.float32) for arg in weights.args])
        total_loss = test(cppn, mnist, max_batches).numpy().item()
        optimizer.tell(weights, total_loss)
        if total_loss < best_loss:
            best_loss = total_loss
            cppn.save("./best.h5")
        print(model_number, "loss:", total_loss, "best:", best_loss)
        model_number += 1
Example #10
def get_instrumentation_from_config(cfg):
    var_type = ng.instrumentation.utils.Variable
    var_keys = cfg.nested_get_key_fn(lambda x: isinstance(x, var_type))
    search_vars = {key: cfg.get_with_dot_str(key) for key in var_keys}
    return ng.Instrumentation(**search_vars)
Example #11
count = 0

target_density = np.linspace(0, 1, N_ADSORP+1)

def fitness(*grid):
    global count
    count += 1
    print(count)
    grid = np.array(grid)
    density = (run_dft_fast(grid))[:41].reshape(41,)
    return np.sum(np.abs(density-target_density))

iargs = []
for _ in range(400):
    arg1 = ng.var.OrderedDiscrete([0,1])
    iargs.append(arg1)
instrum = ng.Instrumentation(*iargs)

# optimizer = ng.optimizers.PSO(instrumentation=instrum, budget=300)
# recommendation = optimizer.minimize(fitness)
# print(recommendation)
# grid = recommendation.data

# tp = (0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1)
# grid = np.array(tp)
grid = np.array([1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1, 1,1,1,1,1,1,1,1,0,0,0,0,0,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,0,0,0,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1])
density = (run_dft_fast(grid))[:41].reshape(41,)
plt.plot(np.linspace(0, 1, 41), density)
plt.show()
Example #12
    user="******",
    passwd=os.environ['MYSQL_PASS'],
    db="dqn_results")
cur = db.cursor()

experimentName = "soft-actor-critic-fixed-entropy"

entropyCoefficientArg = ng.instrumentation.variables.Gaussian(mean=-2, std=2)
learningRateArg = ng.instrumentation.variables.Gaussian(mean=-3, std=2)
rewardScalingArg = ng.instrumentation.variables.Gaussian(mean=-3, std=2)
actionScalingArg = ng.instrumentation.variables.Gaussian(mean=-1, std=1)
weightRegularizationConstantArg = ng.instrumentation.variables.Gaussian(
    mean=-2, std=1)

instrumentation = ng.Instrumentation(entropyCoefficientArg, learningRateArg,
                                     rewardScalingArg, actionScalingArg,
                                     weightRegularizationConstantArg)
optimizer = ng.optimizers.registry["TBPSA"](instrumentation=instrumentation,
                                            budget=os.environ['BUDGET'])

cur.execute("select x1, x2, x3, x4, x5, y from experiments where label = '" +
            experimentName + "'")
result = cur.fetchall()
for row in result:
    candidate = optimizer.create_candidate.from_call(np.log10(float(row[0])),
                                                     np.log10(float(row[1])),
                                                     np.log10(float(row[2])),
                                                     np.log10(float(row[3])),
                                                     np.log10(float(row[4])))
    optimizer.tell(candidate, -float(row[5]))
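The database rows above simply replay past evaluations into the optimizer before continuing the search. A minimal sketch of the same warm-start idea with an in-memory list instead of MySQL (the variables and values are made up for illustration):

import nevergrad as ng

instrumentation = ng.Instrumentation(
    ng.instrumentation.variables.Gaussian(mean=0, std=1),
    ng.instrumentation.variables.Gaussian(mean=0, std=1))
optimizer = ng.optimizers.registry["TBPSA"](instrumentation=instrumentation, budget=100)

# hypothetical (x1, x2, reward) triples from earlier runs
past_results = [(0.1, -0.3, 1.2), (0.5, 0.2, 0.8)]
for x1, x2, reward in past_results:
    candidate = optimizer.create_candidate.from_call(x1, x2)
    optimizer.tell(candidate, -reward)  # nevergrad minimizes, so negate the reward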
Example #13
import nevergrad as ng
import numpy as np
from ray.tune import schedulers
from ray.tune.suggest.nevergrad import NevergradSearch

budget = 400

opt_args = {
    'scheduler.target_lrs': ng.var.Log(1e-4, 1e-2),
    'scheduler.final_lr': ng.var.Log(1e-7, 1e-4),
    'optimizer.weight_decay': ng.var.Log(1e-7, 1e-3),
    'scheduler.exp_factor': ng.var.Scalar().bounded(-.5, 2),
}

inst = ng.Instrumentation(**opt_args)
opt = ng.optimizers.registry['CMA'](instrumentation=inst, budget=budget)
search_alg = NevergradSearch(opt,
                             None,
                             max_concurrent=4,
                             reward_attr='valid_accuracy',
                             mode='max')

exp_args = {
    'resume': False,
    'search_alg': search_alg,
    'scheduler': schedulers.FIFOScheduler(),
    'stop': {
        'is_finished': True
    },
    'verbose': True,
    'num_samples': budget,
Example #14
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from pathlib import Path
import nevergrad as ng
from nevergrad.instrumentation import FolderFunction

if __name__ == "__main__":
    folder = Path(__file__).parents[1] / "instrumentation" / "examples"
    func = FolderFunction(folder, ["python", "examples/script.py"],
                          clean_copy=True)
    instrumentation = ng.Instrumentation(value1=ng.var.Array(1).asscalar(),
                                         value2=12,
                                         string=ng.var.SoftmaxCategorical(
                                             ["plop", "blublu", "plouf"]))
    opt = ng.optimizers.registry["OnePlusOne"](instrumentation, budget=4)
    opt.minimize(func)
    ng.families.ParametrizedOnePlusOne()