def test_quant_embedding(self):
    """Quantize the embedding of a small static graph on CPU.

    Builds a lookup network with an explicitly created ``emb`` parameter,
    clones the graph for inference, initializes parameters, and applies
    ``quant.quant_embedding`` to the inference program.
    """
    self.set_config()

    main_prog = paddle.static.Program()
    start_prog = paddle.static.Program()
    with paddle.static.program_guard(main_prog, start_prog):
        word_ids = paddle.static.data(
            name="input_word", shape=[None, 1], dtype='int64')
        # Uniform-initialized embedding table named 'emb' (100 rows x 128 dims).
        emb_attr = paddle.ParamAttr(
            name='emb',
            initializer=paddle.nn.initializer.Uniform(-0.005, 0.005))
        emb_weight = paddle.static.create_parameter(
            (100, 128), attr=emb_attr, dtype="float32")

        input_emb = paddle.nn.functional.embedding(
            x=word_ids, weight=emb_weight, sparse=True)

    # Inference clone must be taken before parameters are touched further.
    test_prog = main_prog.clone(for_test=True)

    cpu_place = paddle.CPUPlace()
    executor = paddle.static.Executor(cpu_place)
    executor.run(start_prog)

    quant_program = quant.quant_embedding(test_prog, cpu_place)
# 示例 2 (Example 2) — legacy fluid-API variant of the embedding quantization test
    def test_quant_embedding(self):
        """Quantize the embedding of a small legacy-fluid static graph.

        Builds a ``fluid.embedding`` lookup network, clones it for
        inference, runs the default startup program, then applies
        ``quant.quant_embedding`` on the chosen place.
        """
        train_program = fluid.Program()
        with fluid.program_guard(train_program):
            input_word = fluid.data(
                name="input_word", shape=[None, 1], dtype='int64')
            # 100 x 128 embedding table named 'emb', uniform-initialized.
            input_emb = fluid.embedding(
                input=input_word,
                is_sparse=False,
                size=[100, 128],
                param_attr=fluid.ParamAttr(
                    name='emb',
                    initializer=fluid.initializer.Uniform(-0.005, 0.005)))

        infer_program = train_program.clone(for_test=True)

        # Bug fix: use_gpu was hard-coded to True, which made the test crash
        # on CPU-only builds. Select GPU only when this build supports CUDA.
        use_gpu = fluid.is_compiled_with_cuda()
        place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        quant_program = quant.quant_embedding(infer_program, place)
    def test_quant_embedding(self):
        """Quantize the embedding of a paddle.static lookup graph.

        Creates the ``emb`` parameter directly in the program's global
        block, clones the program for inference, runs the default startup
        program, then applies ``quant.quant_embedding``.
        """
        train_program = paddle.static.Program()
        with paddle.static.program_guard(train_program):
            input_word = paddle.static.data(name="input_word",
                                            shape=[None, 1],
                                            dtype='int64')
            # 100 x 128 embedding table named 'emb', uniform-initialized.
            param_attr = paddle.ParamAttr(
                name='emb',
                initializer=paddle.nn.initializer.Uniform(-0.005, 0.005))
            weight = train_program.global_block().create_parameter(
                (100, 128), attr=param_attr, dtype="float32")

            input_emb = paddle.nn.functional.embedding(x=input_word,
                                                       weight=weight,
                                                       sparse=True)

        infer_program = train_program.clone(for_test=True)

        # Bug fix: use_gpu was hard-coded to True, which made the test crash
        # on CPU-only builds. Select GPU only when this build supports CUDA.
        use_gpu = paddle.is_compiled_with_cuda()
        place = paddle.CUDAPlace(0) if use_gpu else paddle.CPUPlace()
        exe = paddle.static.Executor(place)
        exe.run(paddle.static.default_startup_program())

        quant_program = quant.quant_embedding(infer_program, place)
# 示例 4 (Example 4) — end-to-end word2vec-style inference with optional embedding quantization
def infer_epoch(args, vocab_size, test_reader, use_cuda, i2w):
    """ inference function

    Runs analogy-task inference for each saved epoch checkpoint and prints
    per-step and per-epoch top-1 accuracy.

    Args:
        args: parsed CLI options; reads ``emb_size``, ``batch_size`` and
            ``emb_quant`` here.
        vocab_size: number of rows in the embedding table / label space.
        test_reader: callable returning an iterable of batches; each item
            ``dat`` is indexed 0-4 as (word_a, word_b, word_c, label,
            input_words) — inferred from usage below, confirm with the
            reader's definition.
        use_cuda: run on CUDAPlace(0) when truthy, else CPUPlace.
        i2w: id-to-word mapping; accepted but not used in this function.

    NOTE(review): ``start_index``, ``last_index``, ``model_dir``, ``net``,
    ``quant_embedding``, ``fluid``, ``np`` and ``time`` are resolved at
    module level and are not visible in this chunk — verify they are
    defined before calling.
    """
    place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
    exe = fluid.Executor(place)
    emb_size = args.emb_size
    batch_size = args.batch_size  # NOTE(review): assigned but never used below
    # Fresh scope so loaded checkpoint variables don't leak across calls.
    with fluid.scope_guard(fluid.Scope()):
        main_program = fluid.Program()
        with fluid.program_guard(main_program):
            # Build the analogy inference network once; clone per epoch.
            values, pred = net.infer_network(vocab_size, emb_size)
            for epoch in range(start_index, last_index + 1):
                copy_program = main_program.clone()
                model_path = model_dir + "/pass-" + str(epoch)
                # Load this epoch's checkpoint parameters into the clone.
                fluid.io.load_params(executor=exe,
                                     dirname=model_path,
                                     main_program=copy_program)
                if args.emb_quant:
                    # Quantize the lookup table (abs_max) and persist the
                    # quantized model alongside the original checkpoints.
                    config = {
                        'quantize_op_types': 'lookup_table',
                        'lookup_table': {
                            'quantize_type': 'abs_max'
                        },
                    }
                    copy_program = quant_embedding(copy_program, place, config)
                    fluid.io.save_persistables(exe,
                                               './output_quant/pass-' +
                                               str(epoch),
                                               main_program=copy_program)

                accum_num = 0          # correct top-1 predictions this epoch
                accum_num_sum = 0.0    # total examples scored this epoch
                t0 = time.time()       # NOTE(review): timed but never reported
                step_id = 0
                for data in test_reader():
                    step_id += 1
                    b_size = len([dat[0] for dat in data])
                    # Column-extract the three analogy words as (b_size, 1)
                    # int64 arrays matching the feed placeholders.
                    wa = np.array([dat[0] for dat in data
                                   ]).astype("int64").reshape(b_size, 1)
                    wb = np.array([dat[1] for dat in data
                                   ]).astype("int64").reshape(b_size, 1)
                    wc = np.array([dat[2] for dat in data
                                   ]).astype("int64").reshape(b_size, 1)

                    label = [dat[3] for dat in data]
                    input_word = [dat[4] for dat in data]
                    # Score every vocabulary id as a candidate answer.
                    para = exe.run(copy_program,
                                   feed={
                                       "analogy_a":
                                       wa,
                                       "analogy_b":
                                       wb,
                                       "analogy_c":
                                       wc,
                                       "all_label":
                                       np.arange(vocab_size).reshape(
                                           vocab_size, 1).astype("int64"),
                                   },
                                   fetch_list=[pred.name, values],
                                   return_numpy=False)
                    pre = np.array(para[0])
                    val = np.array(para[1])
                    for ii in range(len(label)):
                        # Take the first predicted id that is not one of the
                        # query words; count a hit if it equals the label.
                        top4 = pre[ii]
                        accum_num_sum += 1
                        for idx in top4:
                            if int(idx) in input_word[ii]:
                                continue
                            if int(idx) == int(label[ii][0]):
                                accum_num += 1
                            break
                    # NOTE(review): `% 1` is always true, so this prints every
                    # step — presumably a leftover from a larger interval.
                    if step_id % 1 == 0:
                        print("step:%d %d " % (step_id, accum_num))

                print("epoch:%d \t acc:%.3f " %
                      (epoch, 1.0 * accum_num / accum_num_sum))