def data(args):
    """Build the train, test, and validation DataLoaders.

    Reads raw files from ``args.data_path`` via the project's
    ``data_process`` helpers, converts everything to tensors, and wraps
    the results in ``torch.utils.data.DataLoader`` objects.

    Args:
        args: configuration object; must provide ``data_path`` and
            ``batch_size`` (other fields are consumed by ``data_process``).

    Returns:
        (train_iter, test_iter, vail_iter): three DataLoaders. The
        validation loader carries no labels and is not shuffled.
    """
    # Load and featurize the labeled train/test splits.
    raw_train = data_process.read_files(args.data_path, "train")
    raw_test = data_process.read_files(args.data_path, "test")
    tr_feat, tr_mask, tr_label, tr_len = data_process.get_data_label(
        args, raw_train)
    te_feat, te_mask, te_label, te_len = data_process.get_data_label(
        args, raw_test)

    # The unlabeled validation split comes from a separate reader.
    # NOTE(review): it reads "test.txt" — presumably the held-out file;
    # confirm this path against the data layout.
    va_feat, va_mask, va_len = data_process.read_vail(args, "test.txt")

    # Convert the labeled splits to tensors and wrap them in shuffled loaders.
    tr_feat, tr_mask, tr_label, tr_len = data_process.to_tensor(
        tr_feat, tr_mask, tr_label, tr_len)
    te_feat, te_mask, te_label, te_len = data_process.to_tensor(
        te_feat, te_mask, te_label, te_len)
    train_iter = torch.utils.data.DataLoader(
        torch.utils.data.TensorDataset(tr_feat, tr_mask, tr_label, tr_len),
        batch_size=args.batch_size,
        shuffle=True)
    test_iter = torch.utils.data.DataLoader(
        torch.utils.data.TensorDataset(te_feat, te_mask, te_label, te_len),
        batch_size=args.batch_size,
        shuffle=True)

    # ``to_tensor`` expects four arguments; the validation split has no
    # labels, so the mask is passed again as a placeholder and discarded.
    va_feat, va_mask, _, va_len = data_process.to_tensor(
        va_feat, va_mask, va_mask, va_len)
    vail_iter = torch.utils.data.DataLoader(
        torch.utils.data.TensorDataset(va_feat, va_mask, va_len),
        batch_size=args.batch_size,
        shuffle=False)

    return train_iter, test_iter, vail_iter
#%% Main entry point
if __name__ == '__main__':
    #%% ---- Read raw ECG data ----
    print('read data')
    ecg_signals, ecg_labels = read_data.extract_data(
        read_data.read_data(Args))

    #%% ---- Preprocess ----
    # Crop every signal to a fixed length, reorder axes, and shift the
    # labels so the smallest class id is 0.
    ecg_signals = data_process.cut_out(ecg_signals, Args.len)
    ecg_signals = data_process.axis_change(ecg_signals)
    ecg_labels = data_process.label_from_0(ecg_labels)

    # Deterministic split (fixed random_state) into train/test sets,
    # then conversion to tensors.
    x_train, x_test, y_train, y_test = data_process.train_test_split(
        ecg_signals, ecg_labels,
        trainratio=Args.trainratio,
        random_state=0)
    x_train, y_train = data_process.to_tensor(x_train, y_train)
    x_test, y_test = data_process.to_tensor(x_test, y_test)

    #%% ---- Wrap in DataLoaders ----
    train_dataset = Data.TensorDataset(x_train, y_train)
    test_dataset = Data.TensorDataset(x_test, y_test)
    print('load data')
    loader_train = Data.DataLoader(
        dataset=train_dataset,   # torch TensorDataset format
        batch_size=Args.batch_size,
        shuffle=True,
        num_workers=0,           # single-process loading
    )
    # Test loader: default order, no shuffling.
    loader_test = Data.DataLoader(test_dataset, Args.batch_size)

    # Free the raw arrays/tensors now that the datasets hold the data.
    del ecg_signals, ecg_labels, x_train, x_test, y_train, y_test