lag_feature = Y[i][0] else: lag_feature = Y[i - 1][0] x[i].append(lag_feature) x = np.array(x) print('The shape of feature data (including lag feature):', x.shape) return x if __name__ == '__main__': num_classes = 3 learning_rate = 1e-3 dataset = DatasetLoader() x, Y = dataset.load_csv('./dataset/train.csv', num_classes) x /= 255 # x = add_lag_feature(x) x, Y = buildTrain(x, Y) print('The shape of training data:', x.shape) print('The shape of training label:', Y.shape) Y = to_categorical(Y, num_classes) model = Sequential() model.add( LSTM(512, return_sequences=True, input_shape=(x.shape[1], x.shape[2]))) model.add(Dense(num_classes, activation='softmax')) model.summary() optimizer = Adam(lr=learning_rate)  # NOTE(review): whitespace-collapsed fragment — begins mid-function (the 'def'/'for'/'if' heads before this 'else:' are outside this chunk) and the __main__ script presumably continues past it; do not reformat without the missing context. Grounded concerns: LSTM(..., return_sequences=True) emits one output per timestep, so the following Dense softmax yields a (batch, timesteps, num_classes) tensor while Y is (N, num_classes) after to_categorical — looks like return_sequences should be False; confirm against the (unseen) fit() call. Also Adam(lr=...) uses the deprecated 'lr' keyword — prefer learning_rate=.
else: lag_feature = Y[i - 1][0] x[i].append(lag_feature) x = np.array(x) print('The shape of feature data (including lag feature):', x.shape) return x if __name__ == '__main__': input_dim = 24 num_classes = 3 learning_rate = 1e-4 dataset = DatasetLoader() x, Y = dataset.load_csv('./fgd_prediction/dataset/train.csv', num_classes) x /= 255 # x = add_lag_feature(x) Y = to_categorical(Y, num_classes) model = Sequential() model.add(Dense(24, input_dim=input_dim, activation='relu')) model.add(Dense(12, activation='relu')) model.add(Dense(8, activation='relu')) model.add(Dense(num_classes, activation='softmax')) model.summary() optimizer = Adam(lr=learning_rate) model.compile(loss='categorical_crossentropy', optimizer='adam',  # NOTE(review): whitespace-collapsed fragment — begins mid-function (missing 'if' head before this 'else:') and is cut mid model.compile(...) call; do not reformat without the missing context. Grounded bug: 'optimizer = Adam(lr=learning_rate)' is constructed but the visible compile call passes the STRING 'adam', so the custom learning_rate = 1e-4 is silently ignored — it should pass optimizer=optimizer. 'lr' is also the deprecated keyword; prefer learning_rate=.