"""Resume training a CNN from a saved Setup, feeding data from chunked .npy files."""
import sys
import os

import numpy as np
import keras.backend as K
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
from cnn.Setup import Setup  # fix: Setup was used below but never imported

# Path of the saved setup to resume, given on the command line.
rel_filepath = sys.argv[1]

continue_setup = Setup('')
continue_setup.load(rel_filepath=rel_filepath)

# Optionally override the optimizer learning rate before resuming
# (set change_lr to a float to activate the override).
change_lr = None
if change_lr is not None:
    K.set_value(continue_setup.getModel().optimizer.lr, change_lr)
    print('Changing the model optimizer learning rate to = %f' %
          K.get_value(continue_setup.getModel().optimizer.lr))
else:
    print('Model optimizer learning rate = %f' %
          K.get_value(continue_setup.getModel().optimizer.lr))

# Directories holding the chunked train/validation/test arrays.
(XTrain_directory, YTrain_directory,
 XValidation_directory, YValidation_directory,
 XTest_directory, YTest_directory) = continue_setup.getDataDirectory()

no_of_classes = 15000


def train_data_generator(XTrain_directory, YTrain_directory):
    """Generate training batches from chunked .npy files in the given directories.

    Chunks are saved every 2000 samples ('2000.npy', '4000.npy', ...)
    plus one final partial chunk ('1144636.npy').
    """
    filenames = [str(i) + '.npy' for i in range(2000, 1144000 + 1, 2000)] + ['1144636.npy']
    # NOTE(review): source is truncated here — the load/yield loop of this
    # generator is not visible in this chunk and must be restored from the
    # original file.
"""Resume epoch-by-epoch training of a CNN from a saved Setup (in-memory data)."""
from cnn.Setup import Setup
import sys

import keras.backend as K

# Path of the saved setup to resume, given on the command line.
rel_filepath = sys.argv[1]

continue_setup = Setup('')
continue_setup.load(rel_filepath=rel_filepath)

# Optionally override the optimizer learning rate before resuming
# (set change_lr to a float to activate the override).
change_lr = None
if change_lr is not None:
    K.set_value(continue_setup.getModel().optimizer.lr, change_lr)
    print('Changing the model optimizer learning rate to = %f' %
          K.get_value(continue_setup.getModel().optimizer.lr))
else:
    print('Model optimizer learning rate = %f' %
          K.get_value(continue_setup.getModel().optimizer.lr))

# Full data set is already materialized by the setup.
(X_train_cnn, y_train_one_hot,
 X_val_cnn, y_val_one_hot,
 X_test_cnn, y_test_one_hot) = continue_setup.getData()

# Train one epoch at a time so progress can be checkpointed via updateEpochs.
for epoch in range(continue_setup.getEpoch() + 1, 10000):
    print('Training \'%s\': Epoch %d' % (continue_setup.getName(), epoch))
    # fit() for a single epoch; 'history' holds the per-epoch metrics.
    history = continue_setup.getModel().fit(
        X_train_cnn, y_train_one_hot,
        batch_size=64,
        epochs=1,
        verbose=1,
        validation_data=(X_val_cnn, y_val_one_hot))
    # NOTE(review): the original source is truncated inside this call after
    # test_acc=[0], — any trailing keyword arguments (e.g. a test_loss) must
    # be restored from the original file.
    continue_setup.updateEpochs(
        add_epochs=1,
        train_acc=history.history['acc'],
        train_loss=history.history['loss'],
        val_acc=history.history['val_acc'],
        val_loss=history.history['val_loss'],
        test_acc=[0],
    )
"""Run a saved CNN over chunked test data and write predicted class ids to disk."""
import sys  # fix: sys.argv was used below but sys was never imported
import os

import numpy as np
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
from cnn.Setup import Setup  # fix: Setup was used below but never imported

# Command line: saved-setup path, then directory of chunked X test arrays.
rel_filepath = sys.argv[1]
XTest_directory = sys.argv[2]

continue_setup = Setup('')
continue_setup.load(rel_filepath=rel_filepath)

no_of_classes = 15000


def test_data_generator(XTest_directory, YTest_directory):
    """Yield test batches from chunked .npy files in XTest_directory.

    Chunks are saved every 2000 samples ('2000.npy', ...) plus one final
    partial chunk ('115424.npy'). YTest_directory is unused (prediction only);
    the generator cycles forever, as Keras generators are expected to.
    Each yielded array is reshaped to (batch, 2048, 1, 1) for the CNN input.
    """
    filenames = [str(i) + '.npy' for i in range(2000, 114000 + 1, 2000)] + ['115424.npy']
    while True:
        for filename in filenames:
            X_test = np.load(os.path.join(XTest_directory, filename))
            X_test = X_test.reshape(-1, 2048, 1, 1)
            yield X_test


# fix: use floor division so `steps` is an int (114000 / 2000 + 1 is a float
# in Python 3, which the steps parameter does not accept). 57 full chunks + 1
# partial chunk = 58 steps, matching the generator's file list.
y_pred = continue_setup.getModel().predict_generator(
    test_data_generator(XTest_directory, None),
    steps=114000 // 2000 + 1)
# Collapse per-class probabilities to the arg-max class id per sample.
y_pred = np.argmax(y_pred, axis=1)
np.save('test_result.npy', y_pred)