import random

import numpy as np

import ModelUtils  # project helper module providing build_text_image (import path assumed)


def load_data_val_6(testList, W, index, batch):
    # Build a validation batch of (question, answer, answer) inputs.
    # Each sample is a 50-token sequence of 300-dim embeddings.
    x_train_1 = []
    x_train_2 = []
    x_train_3 = []
    for i in range(0, batch):
        true_index = index + i
        # Clamp the index so the final batch can still be filled
        # when the test list runs out.
        if true_index >= len(testList):
            true_index = len(testList) - 1
        items = testList[true_index].split(' ')
        q_words = items[2].split('_')
        a_words = items[3].split('_')
        x_train_1_words = []
        x_train_2_words = []
        x_train_3_words = []
        for j in range(50):
            x_train_1_words.append(
                ModelUtils.build_text_image(W, q_words[j], padding=1))
            x_train_2_words.append(
                ModelUtils.build_text_image(W, a_words[j], padding=1))
            # No negative is sampled at validation time, so the positive
            # answer is reused for the third input.
            x_train_3_words.append(
                ModelUtils.build_text_image(W, a_words[j], padding=1))
        x_train_1.append(np.array(x_train_1_words).reshape((50, 300)))
        x_train_2.append(np.array(x_train_2_words).reshape((50, 300)))
        x_train_3.append(np.array(x_train_3_words).reshape((50, 300)))
    return np.array(x_train_1), np.array(x_train_2), np.array(x_train_3)
def load_data_6(W, alist, raw, size):
    # Build a training batch of (question, positive answer, negative answer)
    # triples, each as a (50, 300) embedding matrix. Entries of `raw` are
    # expected to already be split into fields; `rand_qa` (defined elsewhere
    # in the module) draws a random negative answer from `alist`.
    x_train_1 = []
    x_train_2 = []
    x_train_3 = []
    for i in range(0, size):
        # Sample a random positive QA pair and a random negative answer.
        items = raw[random.randint(0, len(raw) - 1)]
        nega = rand_qa(alist)
        q_words = items[2].split('_')
        a_words = items[3].split('_')
        neg_words = nega.split('_')
        x_train_1_words = []
        x_train_2_words = []
        x_train_3_words = []
        for j in range(50):
            x_train_1_words.append(
                ModelUtils.build_text_image(W, q_words[j], padding=1))
            x_train_2_words.append(
                ModelUtils.build_text_image(W, a_words[j], padding=1))
            x_train_3_words.append(
                ModelUtils.build_text_image(W, neg_words[j], padding=1))
        x_train_1.append(np.array(x_train_1_words).reshape((50, 300)))
        x_train_2.append(np.array(x_train_2_words).reshape((50, 300)))
        x_train_3.append(np.array(x_train_3_words).reshape((50, 300)))
    return np.array(x_train_1), np.array(x_train_2), np.array(x_train_3)
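# Hypothetical usage sketch (not part of the original code): how the two
# loaders above might be driven. The embedding matrix W, the answer pool
# alist, the training rows raw, and the validation lines testList are
# assumed to be prepared elsewhere in the project; only the call signatures
# and the (batch, 50, 300) output shapes come from the functions above.
def iterate_training_batches(W, alist, raw, steps, batch_size=64):
    # Yield `steps` randomly sampled batches of
    # (question, positive answer, negative answer) matrices.
    for _ in range(steps):
        yield load_data_6(W, alist, raw, batch_size)


def iterate_validation_batches(testList, W, batch_size=64):
    # Walk testList in fixed-size windows; load_data_val_6 clamps the index,
    # so the last short window is padded by repeating the final example.
    # The third output only duplicates the positive answer and is dropped.
    for start in range(0, len(testList), batch_size):
        q, a, _ = load_data_val_6(testList, W, start, batch_size)
        yield q, a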