# Example 1
def data():
	"""Load the male-speaker train/test splits for the four target emotions.

	Returns:
		(X_train, X_test, y_train, y_test, weights) where ``weights`` are
		the class weights computed on the training split.
	"""
	frame_number = 50
	emotions = ['sad', 'ang', 'neu', 'exc']
	print('Loading data...')

	X_train, y_train, weights = static_dataset('train', 'M', emotions, frame_number)
	# BUG FIX: static_dataset returns three values for every split (see the
	# other call sites in this file with the same argument signature), so the
	# test split must also unpack three values; the class weights of the
	# test split are unused here.
	X_test, y_test, _ = static_dataset('test', 'M', emotions, frame_number)
	print(len(X_train), 'train sequences')
	print(len(X_test), 'test sequences')

	print("Pad sequences (samples x time)")
	print('X_train shape:', X_train.shape)
	print('X_test shape:', X_test.shape)

	return X_train, X_test, y_train, y_test, weights
# Architecture hyperparameters for the FFNN experiment.
bias = True
drop_rate = 0.2

# Low-level features (LLF) are 31-dimensional; every other feature type
# in this project uses 87 dimensions.
feature_number = 31 if feature_type == "LLF" else 87

normal_FFNN = FFNN(trainable=trainable,
                   feature_number=feature_number,
                   frame_number=frame_number,
                   emotions=emotions,
                   lr=0.0001)

# Male-speaker data: train split for fitting, validation split for tuning.
x_tr_m, y_tr_m, class_weight_dict_m = static_dataset(feature_type, 'train',
                                                     'M', emotions,
                                                     frame_number)
x_ts_m, y_ts_m, _ = static_dataset(feature_type, 'validation', 'M', emotions,
                                   frame_number)

# Female-speaker data: train split for fitting, test split for evaluation.
x_tr_f, y_tr_f, class_weight_dict_f = static_dataset(feature_type, 'train',
                                                     'F', emotions,
                                                     frame_number)
x_ts_f, y_ts_f, _ = static_dataset(feature_type, 'test', 'F', emotions,
                                   frame_number)

# L2-normalize the male training features along the feature (last) axis.
# NOTE(review): only x_tr_m is normalized in this span — confirm the
# evaluation sets are scaled the same way further down.
y_tr_m = numpy.array(y_tr_m)
x_tr_m = tf.keras.utils.normalize(numpy.array(x_tr_m), axis=-1, order=2)
# Example 3
# Regularization / architecture hyperparameters.
regu = 0.0
bias = True
drop_rate = 0.2

# Low-level features (LLF) are 31-dimensional; every other feature type
# in this project uses 87 dimensions.
feature_number = 31 if feature_type == "LLF" else 87

model = FFNN(trainable=trainable,
             feature_number=feature_number,
             frame_number=frame_number,
             emotions=emotions,
             lr=0.0001)

# Female-speaker data: train split for fitting; the 'test' split is used
# here as the held-out evaluation set (x_v / y_v).
x, y, class_weight_dict = static_dataset(feature_type, 'train', 'F', emotions,
                                         frame_number)
x_v, y_v, _ = static_dataset(feature_type, 'test', 'F', emotions, frame_number)

print(len(x))
# Keep every 4th training example to shrink the training set.
x = x[::4]
y = y[::4]

# L2-normalize the training features along the feature (last) axis and
# convert labels to an array.
# NOTE(review): x_v is not normalized here — confirm the evaluation data
# is scaled consistently with training data before fitting.
x = tf.keras.utils.normalize(numpy.array(x), axis=-1, order=2)
y = numpy.array(y)

# Training callbacks: live loss plotting plus early stopping on the
# validation accuracy (min_delta=1e-4, patience=10 epochs).
plot_losses = PlotLosses()

earlystop = EarlyStopping(monitor='val_acc', min_delta=0.0001, patience=10,
                          verbose=1, mode='auto')


# NOTE(review): this is the *string* 'True', not the boolean True — confirm
# FFNN really expects a string flag here.
trainable = 'True'

# Four-class subset of the full emotion label set.
emotions = ['ang', 'exc', 'neu', 'sad']
size_batch2 = 8
frame_number = 1

# NOTE(review): class_weight_dict_test is assigned twice; the validation
# split's weights are immediately overwritten by the test split's.
x, y, class_weight_dict = static_dataset('train', 'M', emotions, frame_number)
x_val, y_val, class_weight_dict_test = static_dataset('validation', 'M', emotions, frame_number)
x_test, y_test, class_weight_dict_test = static_dataset('test', 'M', emotions, frame_number)

# L2-normalize the training features along the feature (last) axis.
# NOTE(review): x_val and x_test are not normalized here — verify they are
# scaled elsewhere before evaluation.
x = tf.keras.utils.normalize(x, axis=-1, order=2)

# Hyperparameter search grid (currently pinned to a single batch size and
# epoch count; only the learning rate is swept).
batch_sizes = [8]
epochs = [300]
learn_rates = [0.01, 0.005, 0.001, 0.0005, 0.0001, 0.00005, 0.00001,
               0.000001, 0.0000001]