def test_cnn_whale(self):
    """Train a small 2-conv-layer CNN on the whale data set end to end.

    Loads the whale train/test split from disk, builds the keyword
    configuration for ``cnn.cnn`` and runs one full training/evaluation
    cycle, saving weights and results to the working directory.
    """
    # load the whale training / test split from disk
    X_train, Y_train, X_test, Y_test = data.load_whale_data(
        WHALE_TRAIN_DATA,
        WHALE_TEST_DATA
    )

    # network / training configuration
    kwargs = {
        # data
        "X_train": X_train,
        "Y_train": Y_train,
        "X_test": X_test,
        "Y_test": Y_test,
        "input_shape": (1, 192, 192),
        "nb_classes": 477,

        # convolutional layers
        "nb_convo_layers": 2,
        "nb_filters": [32, 32],
        "nb_conv": [3, 3],
        "convo_activations": ["relu", "relu"],
        "maxpools": [False, True],
        "pool_sizes": [None, 2],
        "convo_dropouts": [None, 0.25],

        # dense layers
        # NOTE(review): final dense activation is "relu" while the loss is
        # categorical_crossentropy -- "softmax" is the usual pairing (and is
        # what the other run configurations in this file use); confirm.
        "nb_dense_layers": 1,
        "dense_hidden_neurons": [477],
        "dense_activations": ["relu"],
        "dense_dropouts": [0.5],

        # training
        "loss": "categorical_crossentropy",
        "optimizer": "adadelta",
        "nb_epoch": 30,
        "batch_size": 32,

        # output files
        "weights_file": "weights.hdf5",
        "results_file": "results.dat"
    }

    # train and evaluate
    fitlog, score = cnn.cnn(**kwargs)
# Configuration for an augmented 3-conv / 3-dense run on 96x96 inputs.
# Assumes X_train / Y_train / X_test / Y_test are already in scope.
kwargs = {
    # data
    "X_train": X_train,
    "Y_train": Y_train,
    "X_test": X_test,
    "Y_test": Y_test,
    "input_shape": (1, 96, 96),
    "nb_classes": 447,
    "data_augmentation": True,

    # convolutional stack: three 3x3 layers, each followed by 2x2 pooling
    "nb_convo_layers": 3,
    "nb_filters": [32, 64, 128],
    "nb_conv": [3, 3, 3],
    "convo_activations": ["relu", "relu", "relu"],
    "maxpools": [True, True, True],
    "pool_sizes": [2, 2, 2],
    "convo_dropouts": [None, None, None],

    # classifier head: two hidden layers plus the 447-way softmax output
    "nb_dense_layers": 3,
    "dense_hidden_neurons": [1000, 1000, 447],
    "dense_activations": ["relu", "relu", "softmax"],
    "dense_dropouts": [0.5, 0.5, None],

    # training settings
    "loss": "categorical_crossentropy",
    "optimizer": "adadelta",
    "nb_epoch": 200,
    "batch_size": 32,

    # output files
    "model_file": "model.json",
    "weights_file": "weights.dat",
    "results_file": "results.dat"
}

# run cnn
cnn.cnn(**kwargs)
# Second run configuration: wider 5x5 kernels in the first two conv layers
# and a single hidden dense layer; outputs go to the "*2" file names.
#
# NOTE(review): in the pasted source this block began mid-dict -- the
# "kwargs = {" opener (and possibly the X_train/Y_train/X_test/Y_test
# entries present in the sibling run block) were lost in the paste.
# The opener is reconstructed here by analogy with the previous run
# block; confirm against the original file.
kwargs = {
    "input_shape": (1, 96, 96),
    "nb_classes": 447,
    "data_augmentation": True,

    # convolutional stack
    "nb_convo_layers": 3,
    "nb_filters": [32, 64, 128],
    "nb_conv": [5, 5, 3],
    "convo_activations": ["relu", "relu", "relu"],
    "maxpools": [True, True, True],
    "pool_sizes": [2, 2, 2],
    "convo_dropouts": [None, None, None],

    # classifier head
    "nb_dense_layers": 2,
    "dense_hidden_neurons": [1000, 447],
    "dense_activations": ["relu", "softmax"],
    "dense_dropouts": [0.5, None],

    # training settings
    "loss": "categorical_crossentropy",
    "optimizer": "adadelta",
    "nb_epoch": 200,
    "batch_size": 32,

    # output files
    "model_file": "model2.json",
    "weights_file": "weights2.dat",
    "results_file": "results2.dat"
}

# run cnn
cnn.cnn(**kwargs)
def evaluate_cnn(eval_data):
    """Train and score one CNN individual for the evolutionary search.

    ``eval_data`` keys:
        individual: dict with "chromosome" (list of genes) and "score".
        nn_data: dict with the train/test arrays, "input_shape",
            "n_outputs" and "model_save_dir".
        score_cache / error_cache: dicts keyed by the chromosome string,
            shared across evaluations so duplicate chromosomes are not
            re-trained.

    Returns False when the score was served from the cache, True when a
    fresh evaluation was attempted (even if training failed, in which
    case the individual's score is recorded as -1).
    """
    individual = eval_data["individual"]
    nn_data = eval_data["nn_data"]
    score_cache = eval_data["score_cache"]
    error_cache = eval_data["error_cache"]

    # cache check: an identical chromosome maps to an identical network,
    # so reuse the previously measured score instead of re-training
    chromo_str = "".join(map(str, individual["chromosome"]))
    if chromo_str in score_cache:
        # original format string passed two arguments but referenced only
        # {1}; include the chromosome as {0} so the message is meaningful
        print("chromosome {0} in cache! score is: {1}".format(
            chromo_str, score_cache[chromo_str]))
        individual["score"] = score_cache[chromo_str]
        return False

    # give every fresh evaluation its own folder for model/weights/results
    global EVALUATION_COUNTER
    model_save_dir = nn_data["model_save_dir"]
    model_path = os.path.join(
        model_save_dir, "model_{0}".format(EVALUATION_COUNTER))
    os.mkdir(model_path)
    EVALUATION_COUNTER += 1

    # map the chromosome to keras layer/training keyword arguments
    kwargs = mapping.keras_mapping2(individual)
    kwargs["X_train"] = nn_data["X_train"]
    kwargs["Y_train"] = nn_data["Y_train"]
    kwargs["X_test"] = nn_data["X_test"]
    kwargs["Y_test"] = nn_data["Y_test"]
    kwargs["input_shape"] = nn_data["input_shape"]
    kwargs["nb_classes"] = nn_data["n_outputs"]
    kwargs["model_file"] = os.path.join(model_path, "model.json")
    kwargs["weights_file"] = os.path.join(model_path, "weights.dat")
    kwargs["results_file"] = os.path.join(model_path, "results.dat")

    # train and evaluate; any failure (bad architecture, OOM, ...) is
    # treated as a best-effort miss rather than aborting the search
    try:
        _fitlog, model_score = cnn.cnn(**kwargs)
        print("[score: {0}]".format(model_score[1]))
        individual["score"] = model_score[1]
        score_cache[chromo_str] = model_score[1]
        error_cache[chromo_str] = model_score[0]
    except Exception:
        print("[failed]")
        if DEBUG:
            import traceback
            traceback.print_exc()
        # original assigned a dead local `score = -1`; record the failure
        # on the individual so the search sees a valid (worst) fitness
        individual["score"] = -1
    return True