Example #1
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)

# Use the mixture backend, giving each algorithm equal weight.
if __name__ == "__main__":
    bb.run_backend("mixture", [
        ("random", 1),
        ("tree_structured_parzen_estimator", 1),
        ("annealing", 1),
        ("gaussian_process", 1),
        ("random_forest", 1),
        ("extra_trees", 1),
        ("gradient_boosted_regression_trees", 1),
    ])

# If we're actually optimizing (not just serving saved parameters), store
#  which algorithm the mixture backend has selected.
from bbopt.backends.mixture import MixtureBackend
if isinstance(bb.backend, MixtureBackend):
    bb.remember({
        "alg": bb.backend.selected_alg,
    })

# Set up parameters from a random sample and a choice.
xs = bb.sample("xs", range(10), 5, guess=[3, 4, 5, 6, 7])
y = bb.choice("y", [1, 10, 100], guess=10)

# Set the goal to be the absolute difference of sum(xs) and y.
loss = abs(sum(xs) - y)
bb.minimize(loss)

# Finally, we'll print out the value we used for debugging purposes.
if __name__ == "__main__":
    print(repr(loss))
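Each execution of the script above records exactly one trial, so the suggested values only improve as it is run repeatedly against the same data file. The driver below is a minimal sketch of that workflow in a single process, following the loop pattern used in Example #2 below; the trial count and the choice of a single algorithm instead of the mixture backend are this sketch's own.

from bbopt import BlackBoxOptimizer

bb = BlackBoxOptimizer(file=__file__)

for _ in range(20):  # arbitrary number of trials for this sketch
    # Each bb.run() starts a new trial with freshly suggested values.
    bb.run(alg="tree_structured_parzen_estimator")
    xs = bb.sample("xs", range(10), 5, guess=[3, 4, 5, 6, 7])
    y = bb.choice("y", [1, 10, 100], guess=10)
    bb.minimize(abs(sum(xs) - y))

# The best trial recorded so far (smallest loss):
print(bb.get_optimal_run())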
Example #2
from glob import glob
from math import exp
from os import remove
from os.path import isfile, join, splitext
from pprint import pprint
from shutil import rmtree
from typing import Any

import numpy

# TrainScript, TrainData, ModelParams, create_model, and Usage are assumed
# to be importable from the surrounding project; only the optimization
# script itself is shown here.


class TrainOptimizeScript(TrainScript):
    usage = Usage('''
        Use black box optimization to tune model hyperparameters

        :-t --trials-name str -
            Filename to save hyperparameter optimization trials in
            ('.bbopt.json' will automatically be appended)

        :-c --cycles int 20
            Number of cycles of optimization to run

        :-m --model str .cache/optimized.net
            Model to load from

        ...
    ''') | TrainScript.usage

    def __init__(self, args):
        super().__init__(args)
        from bbopt import BlackBoxOptimizer
        self.bb = BlackBoxOptimizer(file=self.args.trials_name)
        if not self.test:
            data = TrainData.from_both(self.args.tags_file,
                                       self.args.tags_folder, self.args.folder)
            _, self.test = data.load(False, True)

        # Remove any ModelCheckpoint callbacks set up by the parent TrainScript.
        from keras.callbacks import ModelCheckpoint
        for callback in list(self.callbacks):
            if isinstance(callback, ModelCheckpoint):
                self.callbacks.remove(callback)

    def process_args(self, args: Any):
        # Remove any on-disk model files if fewer than 5 parts are present.
        model_parts = glob(splitext(args.model)[0] + '.*')
        if len(model_parts) < 5:
            for name in model_parts:
                if isfile(name):
                    remove(name)
                else:
                    rmtree(name)
        # Strip any extension from the trials name; '.bbopt.json' is appended
        # when the file is actually used.
        args.trials_name = (args.trials_name or '').replace(
            '.bbopt.json', '').replace('.json', '')
        if not args.trials_name:
            if isfile(join('.cache', 'trials.bbopt.json')):
                remove(join('.cache', 'trials.bbopt.json'))
            args.trials_name = join('.cache', 'trials')

    def run(self):
        print('Writing to:', self.args.trials_name + '.bbopt.json')
        for i in range(self.args.cycles):
            self.bb.run(backend="random")
            print("\n= %d = (example #%d)" %
                  (i + 1, len(self.bb.get_data()["examples"]) + 1))

            params = ModelParams(
                recurrent_units=self.bb.randint("units", 1, 70, guess=50),
                dropout=self.bb.uniform("dropout", 0.1, 0.9, guess=0.6),
                extra_metrics=self.args.extra_metrics,
                skip_acc=self.args.no_validation,
                loss_bias=1.0 - self.args.sensitivity)
            print('Testing with:', params)
            model = create_model(self.args.model, params)
            # self.test * False evaluates to (), so validation is skipped
            #  entirely when --no-validation is given.
            model.fit(*self.sampled_data,
                      batch_size=self.args.batch_size,
                      epochs=self.epoch + self.args.epochs,
                      validation_data=self.test * (not self.args.no_validation),
                      callbacks=self.callbacks,
                      initial_epoch=self.epoch)
            resp = model.evaluate(*self.test, batch_size=self.args.batch_size)
            if not isinstance(resp, (list, tuple)):
                resp = [resp, None]
            test_loss, test_acc = resp
            predictions = model.predict(self.test[0],
                                        batch_size=self.args.batch_size)

            num_false_positive = numpy.sum(predictions *
                                           (1 - self.test[1]) > 0.5)
            num_false_negative = numpy.sum(
                (1 - predictions) * self.test[1] > 0.5)
            false_positives = num_false_positive / numpy.sum(
                self.test[1] < 0.5)
            false_negatives = num_false_negative / numpy.sum(
                self.test[1] > 0.5)

            # Logistic penalty on model size (0.5 at 11k parameters, near 1
            #  for smaller models), times an error term that penalizes false
            #  positives more heavily than false negatives.
            param_score = 1.0 / (
                1.0 + exp((model.count_params() - 11000) / 2000))
            fitness = param_score * (1.0 - 0.2 * false_negatives -
                                     0.8 * false_positives)

            self.bb.remember({
                "test loss": test_loss,
                "test accuracy": test_acc,
                "false positive%": false_positives,
                "false negative%": false_negatives,
                "fitness": fitness
            })

            print("False positive: ", false_positives * 100, "%")

            self.bb.maximize(fitness)
            pprint(self.bb.get_current_run())
        best_example = self.bb.get_optimal_run()
        print("\n= BEST = (example #%d)" %
              self.bb.get_data()["examples"].index(best_example))
        pprint(best_example)
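The fitness being maximized above multiplies a logistic penalty on parameter count by an error term that weights false positives four times as heavily as false negatives. The standalone function below simply restates that arithmetic so it can be sanity-checked in isolation; the function name and the sample inputs are this sketch's own.

from math import exp

def sketch_fitness(num_params: int, false_positives: float,
                   false_negatives: float) -> float:
    # Logistic size penalty: 0.5 at 11k parameters, approaching 1 for
    # smaller models and 0 for larger ones.
    param_score = 1.0 / (1.0 + exp((num_params - 11000) / 2000))
    # Error term: false positives cost 0.8 each, false negatives only 0.2.
    return param_score * (1.0 - 0.2 * false_negatives - 0.8 * false_positives)

print(sketch_fitness(8000, 0.02, 0.10))   # about 0.79
print(sketch_fitness(30000, 0.02, 0.10))  # about 7e-05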
Example #3

# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
    bb.run()  # alg="any_fast" should be the default

# We set u ~ dist(0, 1) * sin(dist(0, 1)) where dist is uniform or normal.
from math import sin
dist = bb.choice("dist", ["uniform", "normal"])
if dist == "normal":
    u = bb.normalvariate("x0_n", 0, 1) * sin(bb.normalvariate("x1_n", 0, 1))
else:
    u = bb.random("x0_u") * sin(bb.random("x1_u"))

# normalvariate is a hyperopt-only parameter, so if we used it, the selected
#  backend shouldn't be scikit-optimize.
if hasattr(bb.backend, "selected_backend"):
    bb.remember({"backend": bb.backend.selected_backend})
    if dist == "normal":
        assert bb.backend.selected_backend != "scikit-optimize", bb.backend.selected_backend
else:
    bb.remember({"backend": bb.backend.backend_name})

# Set u as the thing to minimize.
bb.minimize(u)

# Print out the value we used for debugging purposes.
if __name__ == "__main__":
    print(repr(u))
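Because the two branches register differently named parameters ("x0_n"/"x1_n" versus "x0_u"/"x1_u"), each recorded trial only contains values for the branch it actually took. The snippet below is a small follow-on sketch, assuming the bb object from the example above is still in scope, that prints the best trial so the winning branch can be inspected.

if __name__ == "__main__":
    from pprint import pprint
    pprint(bb.get_optimal_run())  # best (lowest-u) trial recorded so far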