def test_mlp_whale(self):
    """Sweep first-hidden-layer sizes for a 2-layer MLP on the whale data.

    For each candidate neuron count, trains an MLP with hidden layers
    [nb_neurons, 447] and writes the model JSON, weights and results into
    a per-configuration directory named "mlp_<nb_neurons>_447".
    """
    X_train, Y_train, X_test, Y_test = data.load_whale_data(
        WHALE_TRAIN_DATA,
        WHALE_TEST_DATA,
        dim=1
    )
    input_shape = (X_train.shape[1], )
    nb_classes = 447  # number of whale classes in this dataset

    for nb_neurons in [447, 500, 600, 800, 1000]:
        target_path = "mlp_{0}_447".format(nb_neurons)
        mkdir_p(target_path)

        # NOTE(review): nb_layers=2 and hidden_neurons has 2 entries, but
        # activations has 4 entries and dropouts has 5 — presumably
        # leftovers from a deeper sweep; confirm mlp.mlp ignores extras.
        mlp.mlp(
            X_train=X_train,
            Y_train=Y_train,
            X_test=X_test,
            Y_test=Y_test,
            input_shape=input_shape,
            nb_classes=nb_classes,
            nb_layers=2,
            hidden_neurons=[nb_neurons, 447],
            activations=["tanh", "tanh", "tanh", "tanh"],
            dropouts=[0.1, 0.1, 0.1, 0.1, 0.1],
            loss='categorical_crossentropy',
            optimizer="adadelta",
            nb_epoch=30,
            batch_size=100,
            model_file=os.path.join(target_path, "model.json"),
            results_file=os.path.join(target_path, "results.dat"),
            weights_file=os.path.join(target_path, "weights.dat")
        )
def test_cnn_whale(self):
    """Train a small 2-conv-layer CNN on the whale data.

    Builds the keyword-argument dict for cnn.cnn() and runs one training
    session, recording the fit log and final score.
    """
    X_train, Y_train, X_test, Y_test = data.load_whale_data(
        WHALE_TRAIN_DATA,
        WHALE_TEST_DATA
    )

    kwargs = {
        "X_train": X_train,
        "Y_train": Y_train,
        "X_test": X_test,
        "Y_test": Y_test,
        "input_shape": (1, 192, 192),
        # BUG FIX: was 477 — the whale dataset has 447 classes everywhere
        # else in this file (test_mlp_whale, test_run, the GA scripts).
        "nb_classes": 447,
        "nb_convo_layers": 2,
        "nb_filters": [32, 32],
        "nb_conv": [3, 3],
        "convo_activations": ["relu", "relu"],
        "maxpools": [False, True],
        "pool_sizes": [None, 2],
        "convo_dropouts": [None, 0.25],
        "nb_dense_layers": 1,
        # BUG FIX: was [477] — keep the dense layer width consistent with
        # the corrected class count.
        "dense_hidden_neurons": [447],
        "dense_activations": ["relu"],
        "dense_dropouts": [0.5],
        "loss": "categorical_crossentropy",
        "optimizer": "adadelta",
        "nb_epoch": 30,
        "batch_size": 32,
        "weights_file": "weights.hdf5",
        "results_file": "results.dat"
    }
    fitlog, score = cnn.cnn(**kwargs)
def test_run(self):
    """Run the GA hyper-parameter tuner on the whale dataset once."""
    # fixed seed so the GA's stochastic search is reproducible
    random.seed(42)

    train_x, train_y, test_x, test_y = data.load_whale_data(
        WHALE_TRAIN_DATA,
        WHALE_TEST_DATA
    )

    # configuration handed to every candidate neural network
    network_config = dict(
        X_train=train_x,
        Y_train=train_y,
        X_test=test_x,
        Y_test=test_y,
        input_shape=(1, 96, 96),
        n_outputs=447,
        model_save_dir="/data/nn_exp"
    )

    # launch the genetic-algorithm search
    ga.run(
        nn_data=network_config,
        chromo_size=500,
        max_gen=20,
        pop_size=20,
        t_size=2,
        record_file_path="execution_test.dat",
        score_file_path="score_test.dat",
        error_file_path="error_test.dat"
    )
def test_denoising_autoencoder(self):
    """Train a single-hidden-layer autoencoder on flattened whale images,
    save the reconstructions, and plot originals next to reconstructions.
    """
    # load data (dim=1 -> flattened image vectors)
    X_train, Y_train, X_test, Y_test = data.load_whale_data(
        WHALE_TRAIN_DATA,
        WHALE_TEST_DATA,
        dim=1
    )

    # autoencoder parameters
    batch_size = 32
    nb_epoch = 1000
    nb_neurons = 600
    img_dim = X_train.shape[1]  # length of one flattened image

    # create and train (targets == inputs for reconstruction)
    X_train_tmp = np.copy(X_train)
    ae = Sequential()
    ae.add(
        AutoEncoder(
            encoder=Dense(nb_neurons, input_dim=img_dim, activation='sigmoid'),
            decoder=Dense(img_dim, input_dim=nb_neurons, activation='sigmoid'),
            output_reconstruction=True,
        ))

    # compile
    ae.compile(loss="mean_squared_error", optimizer="adadelta")

    # fit
    ae.fit(X_train_tmp, X_train_tmp, batch_size=batch_size, nb_epoch=nb_epoch)

    results = ae.predict(X_train, verbose=1)
    np.save("results_2.npy", results)

    # BUG FIX: img_data holds 9 originals + 9 reconstructions (18 images),
    # but previously only img_data[0:9] (the originals) was plotted into
    # the (6, 3) = 18-slot grid, so reconstructions never appeared.
    img_data = np.concatenate((X_train[0:9], results[0:9]))
    data.plot_multiple_mnist_images(img_data, (6, 3))
#!/usr/bin/env python2
import os
import sys
import random
sys.path.append(os.path.join(os.path.dirname(__file__), "../../"))

import recognizer.data as data
import recognizer.tuner.ga as ga


# GLOBAL VARIABLES
WHALE_TRAIN_DATA = "/data/whale_data/manual_rotated/train_data.csv"
WHALE_TEST_DATA = "/data/whale_data/manual_rotated/test_data.csv"


if __name__ == "__main__":
    # fixed seed so the GA's stochastic search is reproducible
    random.seed(42)
    X_train, Y_train, X_test, Y_test = data.load_whale_data(
        WHALE_TRAIN_DATA,
        WHALE_TEST_DATA)

    # neural network data
    # BUG FIX: len(X) / 0.5 *doubles* the length (and produces a float
    # index) instead of taking half the data; use the int(round(len * frac))
    # pattern this codebase uses elsewhere for subsampling.
    train_end = int(round(len(X_train) * 0.5))
    test_end = int(round(len(X_test) * 0.5))
    nn_data = {
        "X_train": X_train[0:train_end],
        "Y_train": Y_train[0:train_end],
        "X_test": X_test[0:test_end],
        "Y_test": Y_test[0:test_end],
        "input_shape": (1, 96, 96),
        "n_outputs": 447,
        "model_save_dir": "/data/ga_cnn_dataset_3"
    }

    # run
import recognizer.data as data import recognizer.tuner.ga as ga # GLOBAL VARIABLE WHALE_TRAIN_DATA = "/home/chutsu/whale_data/train.csv" WHALE_TEST_DATA = "/home/chutsu/whale_data/test.csv" if __name__ == "__main__": seed = int(sys.argv[1]) random.seed(42 + seed) # neural network data X_train, Y_train, X_test, Y_test = data.load_whale_data( WHALE_TRAIN_DATA, WHALE_TEST_DATA ) train_end = int(round(len(X_train) * 0.01)) test_end = int(round(len(X_test) * 0.01)) nn_data = { "X_train": X_train[:train_end], "Y_train": Y_train[:train_end], "X_test": X_test[:test_end], "Y_test": Y_test[:test_end], "input_shape": (1, 192, 192), "n_outputs": 447 } # run ga.run( nn_data=nn_data,