# Plot loss/accuracy per epoch for the randomly-tuned baseline model.
random_accuracy_df[['loss', 'accuracy']].plot()
plt.title('Loss & Accuracy Per EPOCH For Random Model')
plt.xlabel('EPOCH')
plt.ylabel('Accuracy')  # fix: was misspelled 'Accruacy'
plt.show()

# Bayesian-optimisation hyperparameter search over the same hypermodel.
# NOTE(review): objective='accuracy' tunes on *training* accuracy even though
# validation data is supplied to search() — confirm this is intentional.
bayesian_tuner = BayesianOptimization(hypermodel,
                                      objective='accuracy',
                                      max_trials=10,
                                      seed=10,
                                      project_name='divorce test')
bayesian_tuner.search(X_train.values, y_train.values.flatten(),
                      epochs=10,
                      validation_data=(X_test.values, y_test.values.flatten()))

# Rebuild the best configuration and retrain it on the full dataset (X, y).
bayesian_params = bayesian_tuner.get_best_hyperparameters()[0]
bayesian_model = bayesian_tuner.hypermodel.build(bayesian_params)
bayesian_model.fit(X.values, y.values.flatten(), epochs=15)

# Plot the retrained model's loss/accuracy per epoch.
bayesian_accuracy_df = pd.DataFrame(bayesian_model.history.history)
bayesian_accuracy_df[['loss', 'accuracy']].plot()
plt.title('Loss & Accuracy Per EPOCH For Bayesian Optimisation Model')
plt.xlabel('EPOCH')
plt.ylabel('Accuracy')  # fix: was misspelled 'Accruacy'
plt.show()
max_trials=TOTAL_TRIALS, objective=kerastuner.Objective("val_auc", direction="max"), executions_per_trial=EXECUTION_PER_TRIAL, directory=base_dir, project_name=exp_name ) history = tuner.search(train_gen, epochs=EPOCHS, validation_data=val_gen, callbacks = [es, cp], verbose =2, use_multiprocessing=False) # Save best model and weight best_model = tuner.get_best_models()[0] best_config = best_model.optimizer.get_config() best_hyperparameters = tuner.get_best_hyperparameters()[0].get_config() best_hyperparameters_values = tuner.get_best_hyperparameters()[0].values best_model.save(model_dir) best_model.save_weights(weight_dir) with open(os.path.join(param_dir, 'hyperparameters.txt'), "w") as text_file: text_file.write(str(best_hyperparameters)) pickle.dump(best_hyperparameters_values, open(os.path.join(param_dir,'hyperparameters.pickle'), 'wb')) print('Done')