# BBopt setup:
from bbopt import BlackBoxOptimizer

bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
    bb.run_backend(
        "mixture",
        distribution=[
            ("gaussian_process", float("inf")),
            ("tree_structured_parzen_estimator", 1),
        ],
        remove_erroring_algs=True,
    )

# Set some parameters that skopt supports.
x0 = bb.randint("x0", 1, 10, guess=5)
x1 = bb.choice("x1", [-10, -1, 0, 1, 10])

# Set a parameter that only hyperopt supports.
x2 = bb.normalvariate("x2", mu=0, sigma=1)

if not bb.is_serving:
    assert bb.backend.selected_alg == "tree_structured_parzen_estimator", bb.backend.selected_alg

# Set the goal.
y = x0 + x1 * x2
bb.minimize(y)

# Print out the value we used for debugging purposes.
if __name__ == "__main__":
    print(repr(y))
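
# A minimal sketch (not part of the original example) of inspecting the trials
# that repeated runs of the mixture example accumulate, using the same BBopt
# calls that appear in TrainOptimizeScript below (get_data, get_optimal_run).
# The file name "mixture_example.py" is an assumption about what the script
# above is called.
from bbopt import BlackBoxOptimizer

bb_check = BlackBoxOptimizer(file="./mixture_example.py")
examples = bb_check.get_data()["examples"]   # every recorded trial so far
best = bb_check.get_optimal_run()            # the trial with the lowest recorded loss
print("ran %d trials; best so far: %r" % (len(examples), best))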
# Module-level imports assumed from the surrounding mycroft-precise script:
from glob import glob
from os import remove
from os.path import splitext, isfile, join
from pprint import pprint
from shutil import rmtree
from typing import Any

import numpy

from precise.model import create_model, ModelParams
from precise.scripts.train import TrainScript
from precise.train_data import TrainData
from prettyparse import Usage


class TrainOptimizeScript(TrainScript):
    usage = Usage('''
        Use black box optimization to tune model hyperparameters

        :-t --trials-name str -
            Filename to save hyperparameter optimization trials in
            '.bbopt.json' will automatically be appended

        :-c --cycles int 20
            Number of cycles of optimization to run

        :-m --model str .cache/optimized.net
            Model to load from

        ...
    ''') | TrainScript.usage

    def __init__(self, args):
        super().__init__(args)
        from bbopt import BlackBoxOptimizer
        self.bb = BlackBoxOptimizer(file=self.args.trials_name)
        if not self.test:
            data = TrainData.from_both(self.args.tags_file, self.args.tags_folder, self.args.folder)
            _, self.test = data.load(False, True)

        # Drop any ModelCheckpoint callbacks; checkpointing is not wanted
        # while searching over hyperparameters.
        from keras.callbacks import ModelCheckpoint
        for i in list(self.callbacks):
            if isinstance(i, ModelCheckpoint):
                self.callbacks.remove(i)

    def process_args(self, args: Any):
        model_parts = glob(splitext(args.model)[0] + '.*')
        if len(model_parts) < 5:
            for name in model_parts:
                if isfile(name):
                    remove(name)
                else:
                    rmtree(name)
        args.trials_name = args.trials_name.replace('.bbopt.json', '').replace('.json', '')
        if not args.trials_name:
            if isfile(join('.cache', 'trials.bbopt.json')):
                remove(join('.cache', 'trials.bbopt.json'))
            args.trials_name = join('.cache', 'trials')

    def run(self):
        print('Writing to:', self.args.trials_name + '.bbopt.json')
        for i in range(self.args.cycles):
            self.bb.run(backend="random")
            print("\n= %d = (example #%d)" % (i + 1, len(self.bb.get_data()["examples"]) + 1))

            params = ModelParams(
                recurrent_units=self.bb.randint("units", 1, 70, guess=50),
                dropout=self.bb.uniform("dropout", 0.1, 0.9, guess=0.6),
                extra_metrics=self.args.extra_metrics,
                skip_acc=self.args.no_validation,
                loss_bias=1.0 - self.args.sensitivity
            )
            print('Testing with:', params)
            model = create_model(self.args.model, params)
            model.fit(
                *self.sampled_data, batch_size=self.args.batch_size,
                epochs=self.epoch + self.args.epochs,
                validation_data=self.test * (not self.args.no_validation),
                callbacks=self.callbacks, initial_epoch=self.epoch
            )
            resp = model.evaluate(*self.test, batch_size=self.args.batch_size)
            if not isinstance(resp, (list, tuple)):
                resp = [resp, None]
            test_loss, test_acc = resp
            predictions = model.predict(self.test[0], batch_size=self.args.batch_size)

            num_false_positive = numpy.sum(predictions * (1 - self.test[1]) > 0.5)
            num_false_negative = numpy.sum((1 - predictions) * self.test[1] > 0.5)
            false_positives = num_false_positive / numpy.sum(self.test[1] < 0.5)
            false_negatives = num_false_negative / numpy.sum(self.test[1] > 0.5)

            from math import exp
            param_score = 1.0 / (1.0 + exp((model.count_params() - 11000) / 2000))
            fitness = param_score * (1.0 - 0.2 * false_negatives - 0.8 * false_positives)

            self.bb.remember({
                "test loss": test_loss,
                "test accuracy": test_acc,
                "false positive%": false_positives,
                "false negative%": false_negatives,
                "fitness": fitness
            })

            print("False positive: ", false_positives * 100, "%")

            self.bb.maximize(fitness)
            pprint(self.bb.get_current_run())
        best_example = self.bb.get_optimal_run()
        print("\n= BEST = (example #%d)" % self.bb.get_data()["examples"].index(best_example))
        pprint(best_example)
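
# For clarity, the scoring rule inside run() above can be read as a standalone
# function: a logistic penalty on model size (centered near 11k parameters)
# multiplied by an accuracy term that penalizes false positives four times as
# heavily as false negatives. This restatement is only a sketch of the same
# formula, not part of the original script.
from math import exp


def fitness_score(false_positives: float, false_negatives: float, num_params: int) -> float:
    """Same fitness formula as in TrainOptimizeScript.run()."""
    param_score = 1.0 / (1.0 + exp((num_params - 11000) / 2000))
    return param_score * (1.0 - 0.2 * false_negatives - 0.8 * false_positives)


# Example: a 10k-parameter model with 2% false positives and 5% false negatives.
print(fitness_score(0.02, 0.05, 10000))  # ≈ 0.61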
""" Example of using the random backend with BBopt. To run this example, just run: > bbopt ./random_example.py """ # BBopt setup: from bbopt import BlackBoxOptimizer bb = BlackBoxOptimizer(file=__file__) if __name__ == "__main__": bb.run(alg="random") # Let's use some parameters! x = bb.randint("x", 1, 10) # And let's set our goal! bb.maximize(x) # Finally, we'll print out the value we used for debugging purposes. if __name__ == "__main__": print(x)