def DiffSamp(samples):
    """Train one model per sample count and save each run's history.

    For every sample count in *samples*, builds a fresh model, applies the
    data-driven (`jan_initialize`) initialization with that many training
    samples, trains it, and writes the Keras history to disk.

    Parameters
    ----------
    samples : iterable of int
        Sample counts to use for the data-driven initialization.

    Relies on module-level globals: ``X_train``, ``Y_train``, ``X_test``,
    ``Y_test``, ``batch_size``, ``save_dir``, ``get_model``,
    ``jan_initialize``, ``saveHistory``, ``RMSprop``, ``os``.
    """
    # BUG FIX: the original wrote ``for samples in samples`` which rebinds
    # the parameter on every iteration; use a distinct loop variable.
    for samp_count in samples:
        model = get_model()
        opt = RMSprop(lr=0.0001, decay=1e-6)
        optName = "RMSProp"
        initName = "dataInit"
        lr = 0.0001  # recorded in the history file name only
        epochs = 50
        model.compile(loss='categorical_crossentropy',
                      optimizer=opt,
                      metrics=['accuracy'])
        # Data-driven initialization using samp_count training samples.
        model = jan_initialize(model, X_train, samp_count)  #check the working function :)
        hist = model.fit(X_train, Y_train,
                         batch_size=batch_size,
                         epochs=epochs,
                         validation_data=(X_test, Y_test))
        # Save the training history, tagged with lr/epochs/sample count.
        HistoryPath = os.path.join(
            save_dir,
            optName + "_" + initName + "_Hist_"
            + "lr_{:1}_epoch_{:1d}_Jan28_".format(lr, epochs)
            + str(samp_count) + "_Samples")
        saveHistory(HistoryPath, hist)
    return
def trainOptInit(X_train, X_test, Y_train, Y_test, initDict,
                 optDict=None, mode='full', saveHist=True):
    """Train a model for every (optimizer, initializer) combination.

    Parameters
    ----------
    X_train, X_test, Y_train, Y_test : arrays
        Training/validation data already split by the caller.
    initDict : dict
        Maps a display name (e.g. ``'HeNorm'``) to a Keras initializer
        identifier string passed to ``get_model``.
    optDict : dict or None
        Maps an optimizer display name to an optimizer instance.
        Defaults to a single RMSProp optimizer.
    mode : str
        ``'test'`` trims the data to a tiny subset for a quick smoke run.
    saveHist : bool
        When True, saves the trained model and its history to ``save_dir``.

    Returns
    -------
    int
        1 on completion (original convention, kept for callers).

    Relies on module-level globals: ``batch_size``, ``epochs``,
    ``num_classes``, ``save_dir``, ``model_name``, ``get_model``,
    ``saveHistory``, ``RMSprop``, ``EarlyStopping``, ``os``.
    """
    # BUG FIX: a mutable default argument holding an optimizer built at
    # import time was shared across calls; build it lazily instead.
    if optDict is None:
        optDict = {'RMSProp': RMSprop(lr=0.0001, decay=1e-6)}
    if mode == 'test':  # Just test over a few values
        X_train = X_train[0:200]
        Y_train = Y_train[0:200]
        X_test = X_test[0:40]
        Y_test = Y_test[0:40]
    # BUG FIX: ``lr`` and ``earlyStop`` were referenced but their
    # definitions were commented out, causing a NameError at runtime.
    lr = 0.0001  # used only in saved file names — TODO derive from each opt
    earlyStop = EarlyStopping(monitor='val_acc', min_delta=0, patience=10,
                              verbose=0, mode='max')
    # ---------------------------------------------- For Different Init and Optimizers paras
    for optName, opt in optDict.items():
        for key, initName in initDict.items():
            print(" Training for {:s} initialization with {:s} as optimizer".
                  format(key, optName))
            model = get_model(initName, input_shape=X_train.shape[1:],
                              num_classes=num_classes)
            model.compile(loss='categorical_crossentropy',
                          optimizer=opt,
                          metrics=['accuracy'])
            hist = model.fit(X_train, Y_train,
                             batch_size=batch_size,
                             epochs=epochs,
                             validation_data=(X_test, Y_test),
                             shuffle=True,
                             callbacks=[earlyStop])  # With Callback
            if saveHist:
                # ---------------------------------------------- Save History & Model
                model.save(os.path.join(
                    save_dir,
                    optName + initName
                    + "_lr_{:1}_epoch_{:1d}_".format(lr, epochs)
                    + model_name))
                HistoryPath = os.path.join(
                    save_dir,
                    optName + "_" + initName + "_Hist_"
                    + "lr_{:1}_epoch_{:1d}_".format(lr, epochs)
                    + model_name)
                saveHistory(HistoryPath, hist)
    return 1
base_dir = "/home/ankit/Desktop/Dataset/JanResults/CIFAR10"; #save results here... data_folder = "/home/ankit/Desktop/Dataset/CIFAR10Dataset/cifar-10-batches-py/"; #Server img_size = 32; batch_size = 32; num_classes = 10; epochs = 10; #------------------- Get the X_train and Model for temporary use X_train,Y_train,X_test,Y_test = load_data_full(data_folder=data_folder, batch_no = 5,img_size=img_size) model = get_model(); del Y_train, X_test,Y_test; #Freeing up the space #--------------------------------------------------------------------------------------------------- def jan_initialize(model,X_train,samp_size = 8): # samp_size = 8192; totalFiles = list(np.arange(X_train.shape[0])) random.shuffle(totalFiles); layers = model.layers;
# --- Script: 'Jan18' experiment — compile a model and apply the data-driven
# (jan_initialize) initialization with 1024 training samples.
img_size = 32
batch_size = 32
num_classes = 10
epochs = 100
save_dir = os.path.join(base_dir, 'Jan18')
model_name = 'Jan18'
# history_name = 'Keras_CIFAR10_HeInit_v1'
X_train, Y_train, X_test, Y_test = load_data_full(data_folder=data_folder, batch_no=5, img_size=img_size)
# Tiny-subset lines kept for quick smoke tests:
# X_train = X_train[0:200];Y_train = Y_train[0:200];
# X_test = X_test[0:40];Y_test = Y_test[0:40];
model = get_model()
samples = 1024  # number of training samples fed to jan_initialize
opt = RMSprop(lr=0.0001, decay=1e-6)
model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
model = jan_initialize(model, X_train, samples)  #check the working function :)
# NOTE(review): the training call below is commented out and truncated here.
# # hist = model.fit(X_train, Y_train, batch_size=batch_size, epochs=epochs,
# --- Script: train a CIFAR-10 model using the proposed scaled initialization
# (scaledInitialize) on top of He-normal weights, then save model + history path.
img_size = 32
batch_size = 32
num_classes = 10
epochs = 150
save_dir = os.path.join(base_dir,'SavedModels/ProposedModel_try1')
model_name = 'Keras_CIFAR10_proposed_v1.h5'
history_name = 'Keras_CIFAR10_proposed_v1'
#load Train/Test
X_train,Y_train,X_test,Y_test = load_data_full(data_folder=data_folder, batch_no = 5,img_size=img_size)
#ModelKeys
# Display name -> Keras initializer identifier string.
initDict = {'LeCun':'lecun_uniform', 'GlorotNorm':'glorot_normal','GlorotUni':'glorot_uniform','HeNorm':'he_normal','HeUni':'he_uniform'}
model = get_model(initDict['HeNorm'],input_shape= X_train.shape[1:],num_classes = num_classes)
model = scaledInitialize(model= model, X_train=X_train)  #Proposed initialization you want to initialize
initName = 'Proposed_try_1'
optName = 'RMSProp'
lr = 0.0001
opt = RMSprop(lr=lr, decay=1e-6)
model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
hist = model.fit(X_train,Y_train, batch_size=batch_size, epochs=epochs, validation_data=(X_test,Y_test),shuffle=True);  #hist will store everything
# NOTE(review): earlyStop is created below but the fit call that would use it
# is commented out — the callback is currently unused.
earlyStop = EarlyStopping(monitor='val_acc', min_delta=0, patience=10, verbose=0, mode='max')
# hist = model.fit(X_train, Y_train, batch_size=batch_size, epochs=epochs, validation_data=(X_test, Y_test), shuffle=True,callbacks=[earlyStop]);  # With Callback
# ---------------------------------------------- Save History & Model
model.save(os.path.join(save_dir, optName + initName + "_lr_{:1}_epoch_{:1d}_".format(lr, epochs) + model_name))