# NOTE(review): whitespace-mangled fragment — original line breaks and indentation were
# lost, so the block structure (notably which statements fall under
# `if args.mode == "aggregate":`) cannot be reconstructed safely; code left byte-identical.
# The fragment begins mid-call: `args.weights)` closes a constructor call (presumably
# `ShallowNet(...)`, as in the sibling fragments) that started before this view.
# What the visible code does: compiles `best_model` with Adam + binary cross-entropy,
# predicts classes on the test split, scores them with `eval_pred`, prints labels /
# predictions / score, optionally saves best weights, then writes a hyperparameter
# summary dict to summary.txt.
# NOTE(review): `print(..., file=open(...))` leaks the file handle — should be a `with`
# block once the file is un-mangled. Also uses legacy Keras APIs (`Adam(lr=...)`,
# `predict_classes`) removed in modern Keras/tf.keras — confirm pinned Keras version.
args.weights) best_model.compile(optimizer=Adam(lr=best_lr), loss="binary_crossentropy") print("Model built") final_pred = best_model.predict_classes(X=Xs["test"], batch_size=best_batch_size, verbose=0) test_perf = eval_pred(ys["test"], final_pred) print("Test labels:") print(list(ys["test"])) print("Predictions") print([x[0] for x in final_pred]) print("Test perf: {}".format(test_perf)) if args.mode == "aggregate": best_model.save_weights(os.path.join(save_path, "best_weights.h5"), overwrite=True) summary = { "best_lr": best_lr, "best_epochs": best_epochs, "best_dropout": best_dropout, "best_dense_layers": best_dense_layers, "best_dense_layer_units": best_dense_layer_units, "best_batch_size": best_batch_size, "ensemble_size": args.ensemble_size, "test_ensemble_size": args.test_ensemble_size, "test_perf": test_perf } print("\n".join(map(lambda x: "{}: {}".format(x[0], x[1]), summary.items())), file=open(os.path.join(save_path, "summary.txt"), "w"))
# NOTE(review): whitespace-mangled fragment — line breaks/indentation lost; loop and
# `else:` nesting cannot be reconstructed safely, so code is left byte-identical.
# What the visible code does (apparent ensemble-member loop body, index `i`): builds a
# ShallowNet on the train-split feature width, compiles with Adam + binary cross-entropy,
# fits on train+val concatenated (legacy Keras 0.x/1.x kwargs: `nb_epoch`,
# `show_accuracy`), saves per-member weights, dumps per-epoch acc/loss to text files,
# predicts test classes into column `i` of `preds`, then takes the per-row majority vote
# (`scipy.stats.mode`) as `final_pred` and scores it. The `else:` arm builds an
# untrained model and evaluates it via `eval_model` instead.
# NOTE(review): `predict_classes(..., batch_size=batch_size, ...)` uses `batch_size`,
# while every sibling fragment uses `best_batch_size` — looks like a bug (or `batch_size`
# is defined upstream); verify against the full file.
# NOTE(review): the `print(..., file=open(...))` calls leak file handles — use `with`
# once the file is un-mangled.
print("Building model") model = ShallowNet(Xs["train"].shape[1], best_dropout, best_dense_layers, best_dense_layer_units, args.weights) model.compile(optimizer=Adam(lr=best_lr), loss="binary_crossentropy") print("Model built") history = model.fit( X=np.concatenate((Xs["train"], Xs["val"])), y=np.concatenate((ys["train"], ys["val"])), batch_size=best_batch_size, nb_epoch=best_epochs, verbose=1, shuffle=True, show_accuracy=True, ) model.save_weights(os.path.join(save_path, "weights{}.h5".format(i)), overwrite=True) print("\n".join(map(str, history.history["acc"])), file=open(os.path.join(save_path, "train_accs{}.txt".format(i)), "w")) print("\n".join(map(str, history.history["loss"])), file=open(os.path.join(save_path, "train_losses{}.txt".format(i)), "w")) pred = model.predict_classes(X=Xs["test"], batch_size=batch_size, verbose=0) preds[:, i] = pred[:, 0] final_pred = mode(preds, axis=1).mode test_perf = eval_pred(ys["test"], final_pred) else: print("Building model") model = ShallowNet(Xs["train"].shape[1], best_dropout, best_dense_layers, best_dense_layer_units, args.weights) model.compile(optimizer=Adam(lr=best_lr), loss="binary_crossentropy") print("Model built") test_perf = eval_model(model, best_batch_size, Xs["test"], ys["test"])
# NOTE(review): whitespace-mangled fragment, and a near-duplicate of the middle of the
# fragment above it — this chunk appears twice in the extracted file (extraction or
# merge artifact); deduplicate against the full source before editing.
# It begins mid-argument-list: `best_dense_layers, best_dense_layer_units, args.weights)`
# is the tail of a `ShallowNet(...)` constructor call started before this view.
# Visible behavior is identical to the sibling fragment: compile, fit on train+val
# (legacy Keras `nb_epoch` / `show_accuracy` kwargs), save member weights, dump
# per-epoch acc/loss, predict test classes into `preds[:, i]`, majority-vote with
# `mode(preds, axis=1)`, score via `eval_pred`.
# NOTE(review): same suspected inconsistency as the duplicate — `predict_classes` uses
# `batch_size` where siblings use `best_batch_size`; verify. File handles from
# `open(...)` inside `print` are never closed.
best_dense_layers, best_dense_layer_units, args.weights) model.compile(optimizer=Adam(lr=best_lr), loss="binary_crossentropy") print("Model built") history = model.fit( X=np.concatenate((Xs["train"], Xs["val"])), y=np.concatenate((ys["train"], ys["val"])), batch_size=best_batch_size, nb_epoch=best_epochs, verbose=1, shuffle=True, show_accuracy=True, ) model.save_weights(os.path.join(save_path, "weights{}.h5".format(i)), overwrite=True) print("\n".join(map(str, history.history["acc"])), file=open(os.path.join(save_path, "train_accs{}.txt".format(i)), "w")) print("\n".join(map(str, history.history["loss"])), file=open( os.path.join(save_path, "train_losses{}.txt".format(i)), "w")) pred = model.predict_classes(X=Xs["test"], batch_size=batch_size, verbose=0) preds[:, i] = pred[:, 0] final_pred = mode(preds, axis=1).mode test_perf = eval_pred(ys["test"], final_pred)
# NOTE(review): whitespace-mangled fragment, and a near-duplicate of the first fragment
# in this file (extraction/merge artifact — deduplicate against the full source).
# It begins with the tail of a hyperparameter-search branch: `best_val_perf = val_perf`
# and `best_model = model` update the running best, and the following `else:` belongs to
# an `if` outside this view (presumably the mode check selecting search vs. rebuild).
# In the visible else-arm: a fresh ShallowNet is built and compiled as `best_model`,
# test classes are predicted and scored with `eval_pred`, labels/predictions/score are
# printed, weights optionally saved under "aggregate" mode, and the hyperparameter
# summary dict is written to summary.txt.
# NOTE(review): indentation loss makes it impossible to tell which trailing statements
# are inside the `if args.mode == "aggregate":` suite — code left byte-identical.
# Legacy Keras APIs (`Adam(lr=...)`, `predict_classes`) and an unclosed `open(...)`
# handle in the final `print` — same cleanup as the twin fragment once un-mangled.
best_val_perf = val_perf best_model = model else: print("Building model") best_model = ShallowNet(Xs["train"].shape[1], best_dropout, best_dense_layers, best_dense_layer_units, args.weights) best_model.compile(optimizer=Adam(lr=best_lr), loss="binary_crossentropy") print("Model built") final_pred = best_model.predict_classes(X=Xs["test"], batch_size=best_batch_size, verbose=0) test_perf = eval_pred(ys["test"], final_pred) print("Test labels:") print(list(ys["test"])) print("Predictions") print([x[0] for x in final_pred]) print("Test perf: {}".format(test_perf)) if args.mode == "aggregate": best_model.save_weights(os.path.join(save_path, "best_weights.h5"), overwrite=True) summary = { "best_lr": best_lr, "best_epochs": best_epochs, "best_dropout": best_dropout, "best_dense_layers": best_dense_layers, "best_dense_layer_units": best_dense_layer_units, "best_batch_size": best_batch_size, "ensemble_size": args.ensemble_size, "test_ensemble_size": args.test_ensemble_size, "test_perf": test_perf } print("\n".join(map(lambda x: "{}: {}".format(x[0], x[1]), summary.items())), file=open(os.path.join(save_path, "summary.txt"), "w"))