Example #1
def DiffSamp(samples):

    for sample in samples:

        # model = get_model();
        model = get_model_activation(act=act)

        opt = RMSprop(lr=0.0007, decay=1e-6)
        optName = "RMSProp"
        initName = "dataInit"
        lr = 0.0007
        epochs = 50

        model.compile(loss='categorical_crossentropy',
                      optimizer=opt,
                      metrics=['accuracy'])

        # data-dependent initialization; jan_initialize is defined elsewhere
        model = jan_initialize(model, X_train, sample)
        hist = model.fit(X_train,
                         Y_train,
                         batch_size=batch_size,
                         epochs=epochs,
                         validation_data=(X_test, Y_test))

        HistoryPath = os.path.join(
            save_dir, optName + "_" + initName + "_Hist_" + act +
            "_lr_{:1}_epoch_{:1d}_".format(lr, epochs) + model_name)
        saveHistory(HistoryPath, hist)
    return
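
# Both functions in this listing call saveHistory(), whose definition is not shown.
# A minimal sketch of such a helper, assuming it simply pickles the Keras
# History.history dict to the given path (hypothetical implementation):
import pickle

def saveHistory(path, hist):
    # hist is the History object returned by model.fit(); persist its metrics dict
    with open(path, 'wb') as f:
        pickle.dump(hist.history, f)
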
def trainOptInit(X_train,
                 X_test,
                 Y_train,
                 Y_test,
                 initDict,
                 optDict={'RMSProp': RMSprop(lr=0.0001, decay=1e-6)},
                 mode='full',
                 saveHist=True):
    if mode == 'test':  #Just test over a few values
        X_train = X_train[0:200]
        Y_train = Y_train[0:200]
        X_test = X_test[0:40]
        Y_test = Y_test[0:40]

    for optName, opt in optDict.items():
        #---------------------------------------------- Loop over initializations and optimizer parameters
        for key, initName in initDict.items():
            print(" Training for {:s} initialization with {:s} as optimizer".
                  format(key, optName))
            model = get_model(initName,
                              input_shape=X_train.shape[1:],
                              num_classes=num_classes)
            # lr = 0.0001
            # opt = RMSprop(lr=lr, decay=1e-6)
            model.compile(loss='categorical_crossentropy',
                          optimizer=opt,
                          metrics=['accuracy'])
            # Stop early if validation accuracy plateaus (used in the callbacks below)
            earlyStop = EarlyStopping(monitor='val_acc', min_delta=0, patience=10, verbose=0, mode='max')
            hist = model.fit(X_train,
                             Y_train,
                             batch_size=batch_size,
                             epochs=epochs,
                             validation_data=(X_test, Y_test),
                             shuffle=True,
                             callbacks=[earlyStop])
            #With Callback
            if saveHist:
                #---------------------------------------------- Save History & Model
                model.save(
                    os.path.join(
                        save_dir, optName + initName +
                        "_lr_{:1}_epoch_{:1d}_".format(lr, epochs) +
                        model_name))
                HistoryPath = os.path.join(
                    save_dir, optName + "_" + initName + "_Hist_" +
                    "lr_{:1}_epoch_{:1d}_".format(lr, epochs) + model_name)
                saveHistory(HistoryPath, hist)
    return 1
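
# A hedged usage sketch for trainOptInit, assuming get_model() accepts standard
# Keras initializer strings and that batch_size, epochs, lr, save_dir and
# model_name are defined as globals, as in the surrounding examples
# (the initializer labels below are illustrative, not from the original):
initDict = {'Glorot': 'glorot_uniform',
            'He': 'he_normal'}
optDict = {'RMSProp': RMSprop(lr=0.0001, decay=1e-6)}
trainOptInit(X_train, X_test, Y_train, Y_test,
             initDict, optDict=optDict, mode='test')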
Example #3
    # Quick sanity check that the model trains:
    # X_train = X_train[0:200, :]  # reduce the data to speed up computation
    # Y_train = Y_train[0:200]  # keep these two lines commented for the full run

    load_model_path = os.path.join(save_dir,
                                   str('batch_') + str(idx - 1) + model_name)
    print("Model Trained on batch {:1d} is loaded".format(idx - 1))
    model = load_model(load_model_path)

    filepath = base_dir + "BestModels/FirstModel/" + "batch_{0:01d}_".format(
        idx) + "Model - {epoch:02d}-{val_acc:.2f}.hdf5"
    checkpointer = ModelCheckpoint(filepath=filepath,
                                   verbose=1,
                                   monitor='val_acc',  # match the val_acc field in the filename template
                                   mode='max',
                                   save_best_only=True)
    callback_list = [checkpointer]
    hist = model.fit(X_train,
                     Y_train,
                     batch_size=batch_size,
                     epochs=epochs,
                     validation_split=0.2,
                     shuffle=True,
                     callbacks=callback_list)
    #hist holds the per-epoch training and validation metrics

    model.save(os.path.join(save_dir, str('batch_') + str(idx) + model_name))
    saveHistory(os.path.join(save_dir,
                             str('hist_{0:01d}'.format(idx)) + model_name),
                hist=hist)  #Save History
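
    # The ModelCheckpoint above writes one file per improvement; a hedged sketch
    # for picking up the most recent best checkpoint of this batch (the glob
    # pattern is inferred from the filepath template and is not part of the original):
    import glob
    best_files = sorted(glob.glob(
        base_dir + "BestModels/FirstModel/" +
        "batch_{0:01d}_".format(idx) + "Model - *.hdf5"))
    if best_files:
        best_model = load_model(best_files[-1])  # latest (highest-epoch) best checkpoint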
Example #4
model = jan_initialize(model, X_train, samples)  # data-dependent initialization on the training data
#
#
hist = model.fit(X_train,
                 Y_train,
                 batch_size=batch_size,
                 epochs=epochs,
                 validation_data=(X_test, Y_test))
#
optName = "RMSProp"
initName = "dataInit"
HistoryPath = os.path.join(
    save_dir, optName + "_" + initName + "_Hist_" +
    "lr_{:1}_epoch_{:1d}_Jan15".format(lr, epochs) + str(samples) + "_Samples")
saveHistory(HistoryPath, hist)

#-------------------------------------------------------------- all standard initializations, with the model under different activations
#only run the imports at the top and start from here ....

base_dir = "/home/ankit/Desktop/Dataset/MarResults/CIFAR10/RMSProp/eight_layers/tanh"
#save results here...
save_dir = os.path.join(base_dir, 'Mar12')

data_folder = "/home/ankit/Desktop/Dataset/CIFAR10Dataset/cifar-10-batches-py/"  #Server
idx = 1
img_size = 32

batch_size = 32
num_classes = 10
model_name = 'Mar12'
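
# The script writes models and histories into save_dir but never creates it; a
# small hedged addition in case the directory does not exist yet on a fresh run:
if not os.path.isdir(save_dir):
    os.makedirs(save_dir)  # create .../Mar12 and any missing parent directories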
Example #5
                               mode='max',
                               save_best_only=True)
callback_list = [checkpointer]

# Quick sanity check that the model trains:
# X_train = X_train[0:200, :]  # reduce the data to speed up computation
# Y_train = Y_train[0:200]  # keep these two lines commented for the full run

hist = model.fit(X_train,
                 Y_train,
                 batch_size=batch_size,
                 epochs=epochs,
                 validation_split=0.2,
                 shuffle=True,
                 callbacks=callback_list)
#hist holds the per-epoch training and validation metrics

saveHistory(os.path.join(save_dir,
                         str('hist_') + history_name),
            hist=hist)  #Save History
# prevHist = loadHistory(os.path.join(save_dir, str('hist_') + model_name))                   #Load History
#-------------------------------------------------------------------------------------------------- Testing
score = model.evaluate(X_train, Y_train, batch_size=32)  # note: evaluated on the training set
model.save(os.path.join(save_dir, model_name))

# model.save(os.path.join(save_dir, str('copy_') + model_name))         #save a copy of the model if you want

#--------------------------------------------------------------------------------------------------- Load Existing Model
load_model_path = os.path.join(save_dir, model_name)
model = load_model(load_model_path)
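
# The commented-out loadHistory call above suggests a counterpart to saveHistory;
# a minimal sketch, assuming histories are pickled dicts as in the saveHistory
# sketch shown earlier (hypothetical implementation):
import pickle

def loadHistory(path):
    # Read back the metrics dict written by saveHistory
    with open(path, 'rb') as f:
        return pickle.load(f)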