reduce_lr = ReduceLROnPlateau('val_loss', factor=0.1,
                              patience=int(patience / 4), verbose=1)
trained_models_path = base_path + '_mini_XCEPTION'
model_names = trained_models_path + '.{epoch:02d}-{val_acc:.2f}.hdf5'
model_checkpoint = ModelCheckpoint(model_names, 'val_loss', verbose=1,
                                   save_best_only=True)
# checkpoint the best model, log to CSV, stop early, and lower the LR on plateaus
callbacks = [model_checkpoint, csv_logger, early_stop, reduce_lr]

# loading dataset
faces, emotions = load_fer2013()
faces = preprocess_input(faces)
num_samples, num_classes = emotions.shape
xtrain, xtest, ytrain, ytest = train_test_split(faces, emotions,
                                                test_size=0.2, shuffle=True)
model.fit_generator(data_generator.flow(xtrain, ytrain, batch_size),
                    steps_per_epoch=len(xtrain) // batch_size,  # integer steps
                    epochs=num_epochs, verbose=1, callbacks=callbacks,
                    validation_data=(xtest, ytest))
model.save("hdf.h5")
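The script above leans on helpers defined elsewhere in the project: load_fer2013, preprocess_input, and the data_generator passed to fit_generator. A minimal sketch of what they could look like is given below; the CSV path, the [-1, 1] scaling, and the augmentation ranges are assumptions made for illustration, not taken from the original code.

import numpy as np
import pandas as pd
from keras.preprocessing.image import ImageDataGenerator

def load_fer2013(path='./data/fer2013.csv'):
    # Each row holds an integer emotion label and a space-separated
    # string of 48 * 48 grayscale pixel values.
    data = pd.read_csv(path)
    faces = np.array([np.asarray(p.split(), dtype='float32').reshape(48, 48)
                      for p in data['pixels']])
    faces = np.expand_dims(faces, -1)                   # shape (N, 48, 48, 1)
    emotions = pd.get_dummies(data['emotion']).values   # one-hot labels
    return faces, emotions

def preprocess_input(x):
    # Scale pixels from [0, 255] to [-1, 1].
    x = x.astype('float32') / 255.0
    return (x - 0.5) * 2.0

# Augmentation fed to fit_generator; the ranges here are illustrative.
data_generator = ImageDataGenerator(rotation_range=10,
                                    width_shift_range=0.1,
                                    height_shift_range=0.1,
                                    zoom_range=0.1,
                                    horizontal_flip=True)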
early_stop = EarlyStopping('val_loss', patience=patience)
reduce_lr = ReduceLROnPlateau('val_loss', factor=0.1,
                              patience=int(patience / 4), verbose=1)
trained_models_path = base_path + '_mini_XCEPTION'
model_names = trained_models_path + '.{epoch:02d}-{val_acc:.2f}.hdf5'
model_checkpoint = ModelCheckpoint(model_names, 'val_loss', verbose=1,
                                   save_best_only=True)
callbacks = [model_checkpoint, csv_logger, early_stop, reduce_lr]

# loading dataset (cached to a pickle so preprocessing only runs once)
if os.path.isfile(base_data_path + "processed_data_without_pseudo.pickle"):
    with open(base_data_path + "processed_data_without_pseudo.pickle", "rb") as f:
        faces, emotions = pickle.load(f)
else:
    faces, emotions = load_fer2013(custom=True)
    faces = preprocess_input(faces)
    with open(base_data_path + "processed_data_without_pseudo.pickle", "wb") as f:
        pickle.dump((faces, emotions), f)

pseudo = False
if pseudo and os.path.isfile("./data/pseudo_data.pickle"):
    with open("./data/pseudo_data.pickle", "rb") as f:
        pseudoImages, pseudoLabels = pickle.load(f)
    pseudoImages = np.asarray(pseudoImages).reshape((len(pseudoImages), 48, 48, 1))
    pseudoLabels = pd.get_dummies(pseudoLabels).values  # one-hot encode the pseudo labels
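The listing stops before the pseudo-labelled arrays are actually used. One way the merge could look, assuming the pseudo images still need the same preprocessing as the FER2013 faces and that their one-hot columns follow the same class order as emotions (neither is guaranteed by the snippet above), is:

xtrain, xtest, ytrain, ytest = train_test_split(faces, emotions,
                                                test_size=0.2, shuffle=True)
if pseudo:
    # Assumed merge step, not shown in the original snippet: append the
    # pseudo-labelled samples to the training split only, so the
    # validation split stays purely FER2013.
    xtrain = np.concatenate([xtrain, preprocess_input(pseudoImages)], axis=0)
    ytrain = np.concatenate([ytrain, pseudoLabels], axis=0)

Training would then proceed as in the first listing (fit_generator with the same callbacks); keeping the pseudo data out of the validation split avoids rewarding the model for fitting its own noisy labels.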