Example #1
0
def test_batch_generator(batch_size, max_enc_len=300):
    """Build a batched tf.data pipeline over the test set.

    Args:
        batch_size: number of samples per batch.
        max_enc_len: maximum encoder input length passed to the loader.

    Returns:
        (dataset, steps_per_epoch): `dataset` yields batches of the test
        inputs; `steps_per_epoch` is the number of batches it actually
        produces.
    """
    # Load the test inputs (padded/truncated to max_enc_len).
    test_X = load_test_dataset(max_enc_len)
    dataset = tf.data.Dataset.from_tensor_slices(test_X)
    dataset = dataset.batch(batch_size)
    # batch() keeps the final partial batch by default (drop_remainder=False),
    # so count batches with ceiling division; the original floor division
    # undercounted and any loop driven by steps_per_epoch skipped the tail.
    steps_per_epoch = (len(test_X) + batch_size - 1) // batch_size
    return dataset, steps_per_epoch
def bream_test_batch_generator(beam_size, max_enc_len=200):
    """Yield each test sample replicated `beam_size` times for beam search.

    Args:
        beam_size: number of copies of each input row (one per beam).
        max_enc_len: maximum encoder input length passed to the loader.

    Yields:
        A tensor containing `beam_size` identical copies of one test row,
        so every beam starts decoding from the same input.
    """
    # Load the test inputs once, then stream them one row at a time.
    test_X = load_test_dataset(max_enc_len)
    for sample in test_X:
        yield tf.convert_to_tensor([sample] * beam_size)
Example #3
0
def generate_test_batch(batch_size, max_enc_len=200, max_dec_len=50):
    """Create a batched tf.data.Dataset over the test inputs.

    Args:
        batch_size: samples per batch.
        max_enc_len: maximum encoder input length passed to the loader.
        max_dec_len: unused here; presumably kept so the signature mirrors
            the training-side batch generators — confirm before removing.

    Returns:
        A tf.data.Dataset yielding batches, including the final partial one
        (drop_remainder=False), so no test sample is skipped.
    """
    inputs = load_test_dataset(max_enc_len)
    return (tf.data.Dataset
            .from_tensor_slices(inputs)
            .batch(batch_size, drop_remainder=False))
def inference():
    """Run the extractive classifier over the test set and save predictions.

    Side effects:
        Loads model weights from data/Extractive/ under `root`, then writes
        the binarised predictions to data/result/SIF_test.npy under `root`.
    """
    features = load_test_dataset()
    padding_mask = create_pad_mask(features)
    print('test_X.shape', features.shape)

    # NOTE(review): both the output dimension and the max sequence length
    # come from the same axis (shape[1]) — presumably one label per input
    # position; confirm this matches the model's contract.
    output_dim = features.shape[1]
    print('output_dim', output_dim)
    max_seq_len = features.shape[1]
    print('max_seq_len', max_seq_len)

    embedding_matrix = load_embedding_matrix(sentence_embedding_matrix_path)
    model = Transformer_for_Classification(embedding_matrix,
                                           max_enc_len=max_seq_len,
                                           output_dim=output_dim)
    weights_path = os.path.join(root, 'data', 'Extractive',
                                'BaiduQuestion_SIF_transformer.h5')
    model.load_weights(weights_path)

    # Threshold the scores at 0.5 into {0, 1} and persist as a numpy array.
    predictions = np.where(model.predict((features, padding_mask)) > 0.5, 1, 0)
    np.save(os.path.join(root, 'data', 'result', 'SIF_test'), predictions)
Example #5
0
def predict_result(model, params, vocab, result_save_path):
    """Greedy-decode the test set with `model` and persist the results.

    Args:
        model: the decoding model passed through to `greedy_decode`.
        params: dict read for 'max_enc_len' and 'batch_size'.
        vocab: vocabulary forwarded to the decoder.
        result_save_path: destination path for the decoded results.
    """
    # Load test inputs, decode greedily, then save.
    test_inputs = load_test_dataset(params['max_enc_len'])
    decoded = greedy_decode(model, test_inputs, params['batch_size'], vocab, params)
    save_predict_result(decoded, result_save_path)