def prepare_training_set(params):
    """Load the database, optionally normalise and subsample it, and build
    padded training/test sequences via ``prepare_sequences``.

    Returns the (possibly updated) ``params`` followed by the training and
    test sequence structures produced by ``prepare_sequences``.
    """
    (x_tr, y_tr, names_tr,
     x_te, y_te, names_te) = load_db(params)

    # Mode 3: normalise inputs and targets separately; the returned
    # statistics (presumably mean/std — named men/std by the helper) are
    # stashed in params so predictions can be de-normalised later.
    if params["normalise_data"] == 3:
        print("Normalasing the input and target values")
        x_tr, x_te, men, std = ut.normalise_data(x_tr, x_te)
        params["x_men"], params["x_std"] = men, std
        y_tr, y_te, men, std = ut.normalise_data(y_tr, y_te)
        params["y_men"], params["y_std"] = men, std

    # Optional frame subsampling; the per-sequence bookkeeping returned by
    # subsample_frames is kept in params for later use.
    if params["subsample"] > 0:
        print("Subsampling frames %s" % params["subsample"])
        x_tr, y_tr, names_tr, rel_tr = subsample_frames(params, x_tr, y_tr, names_tr)
        x_te, y_te, names_te, rel_te = subsample_frames(params, x_te, y_te, names_te)
        params["seq_rel_train"] = rel_tr
        params["seq_rel_test"] = rel_te

    (X_train, Y_train, F_list_train, G_list_train,
     S_Train_list, R_L_Train_list) = prepare_sequences(params, x_tr, y_tr, names_tr)
    (X_test, Y_test, F_list_test, G_list_test,
     S_Test_list, R_L_Test_list) = prepare_sequences(params, x_te, y_te, names_te)

    return (params, X_train, Y_train, F_list_train, G_list_train, S_Train_list,
            R_L_Train_list, X_test, Y_test, F_list_test, G_list_test,
            S_Test_list, R_L_Test_list)
def load_flat_data(params):
    """Load the database and return the raw (non-sequenced) train/test arrays.

    ``params["normalise_data"]`` selects the normalisation mode (3: inputs
    and targets normalised separately; 4: all values normalised together
    with statistics of the entire training set), and ``params["subsample"]``
    > 0 triggers frame subsampling.
    """
    (x_tr, y_tr, names_tr,
     x_te, y_te, names_te) = load_db(params)

    if params["normalise_data"] == 3:
        # Inputs and targets get independent statistics.
        print("Normalasing the input and target values")
        x_tr, x_te, men, std = ut.normalise_data(x_tr, x_te)
        params["x_men"], params["x_std"] = men, std
        y_tr, y_te, men, std = ut.normalise_data(y_tr, y_te)
        params["y_men"], params["y_std"] = men, std

    if params["normalise_data"] == 4:
        # One shared set of statistics; note that only the x_* keys are
        # stored in this mode.
        print(
            "Normalasing the input and target values with same std of entire training set"
        )
        x_tr, y_tr, x_te, y_te, men, std = ut.complete_normalise_data(
            x_tr, y_tr, x_te, y_te)
        params["x_men"], params["x_std"] = men, std

    if params["subsample"] > 0:
        print("Subsampling frames %s" % params["subsample"])
        x_tr, y_tr, names_tr, rel_tr = subsample_frames(params, x_tr, y_tr, names_tr)
        x_te, y_te, names_te, rel_te = subsample_frames(params, x_te, y_te, names_te)
        params["seq_rel_train"] = rel_tr
        params["seq_rel_test"] = rel_te

    return params, x_tr, y_tr, names_tr, x_te, y_te, names_te
# label_path_1 = 'demo/Emotion/data/sensor_b8r3_c5_y_s40_e80.npy'

# Load the azimuth / elevation heatmap arrays from disk.
azi = np.load(azi_data_path)
ele = np.load(ele_data_path)

# One class id per emotion, repeated for the 80 samples of each class
# (same float64 values as filling a zeros matrix row-by-row and flattening).
label = np.repeat(np.arange(len(emotion_list), dtype=np.float64), 80)

# Insert a channel axis: (N, ...) -> (N, 1, ...).
azi = np.expand_dims(azi, axis=1)
ele = np.expand_dims(ele, axis=1)

# Normalise each modality independently.
azi = normalise_data(azi)
ele = normalise_data(ele)

# Stratified 80/20 split so every class appears in both partitions.
azi_train, azi_test, ele_train, ele_test, label_train, label_test = \
    train_test_split(azi, ele, label, test_size=0.2, random_state=25,
                     stratify=label)

train_loader = senor_heatmap_label_data_loader(azi_train, ele_train,
                                               label_train,
                                               batch_size=BATCH_SIZE)
# The whole test set is evaluated as a single batch.
test_loader = senor_heatmap_label_data_loader(azi_test, ele_test, label_test,
                                              batch_size=np.shape(azi_test)[0])

# log path
# path = dir_path("sensor_heatmap_3dcnn", result_dir)
'flatten_factor': 30, }  # NOTE(review): the dict opening is outside this view

# results dir
result_dir = "FER/results"

# load data -- presumably precomputed 3s differential emotion features
# (TODO confirm against the producer of these .npy files)
# df_x = np.load('C:/Users/Zber/Documents/Dev_program/OpenRadar/demo/Emotion/data/emotion_3s_diff_segment_x.npy')
# df_y = np.load('C:/Users/Zber/Documents/Dev_program/OpenRadar/demo/Emotion/data/emotion_3s_diff_segment_y.npy')
df_x = np.load('//data/emotion_3s_diff_x.npy')
df_y = np.load('//data/emotion_3s_diff_y.npy')

# normalization, then insert a channel axis at position 2
df_x = normalise_data(df_x)
df_x = np.expand_dims(df_x, axis=2)
# df_y_eye = np_to_eye(df_y, num_class=7)

# split data: stratified 80/20 with a fixed seed for reproducibility
x_train, x_test, y_train, y_test = train_test_split(df_x, df_y, test_size=0.2,
                                                    random_state=25,
                                                    stratify=df_y)
train_loader = data_loader(x_train, y_train, batch_size=BATCH_SIZE)
test_loader = data_loader(x_test, y_test, batch_size=BATCH_SIZE)

# log path
path = dir_path("emotion_3s_diff_segment", result_dir)

# create model and move it to the configured device
model = LeNet(**model_para)
model = model.to(device)