Example #1
0
def run_benchmark(model, args):
    """Build an IMDB sentiment model via `model`, train it, and log metrics.

    Args:
        model: callable(data_var, dict_dim) -> prediction variable; builds
            the network on top of the word-id input layer.
        args: parsed CLI namespace; reads use_cprof, batch_size, device,
            pass_num and iterations.
    """
    if args.use_cprof:
        pr = cProfile.Profile()
        pr.enable()
    start_time = time.time()
    word_dict = paddle.dataset.imdb.word_dict()

    print("load word dict successfully")

    dict_dim = len(word_dict)

    # Variable-length word-id sequences (lod_level=1) plus per-example labels.
    data = fluid.layers.data(name="words",
                             shape=[1],
                             dtype="int64",
                             lod_level=1)
    label = fluid.layers.data(name="label", shape=[1], dtype="int64")

    prediction = model(data, dict_dim)
    cost = fluid.layers.cross_entropy(input=prediction, label=label)
    avg_cost = fluid.layers.mean(x=cost)
    adam_optimizer = fluid.optimizer.Adam(learning_rate=0.002)
    adam_optimizer.minimize(avg_cost)
    accuracy = fluid.evaluator.Accuracy(input=prediction, label=label)

    train_reader = paddle.batch(paddle.reader.shuffle(
        paddle.dataset.imdb.train(word_dict), buf_size=25000),
                                batch_size=args.batch_size)
    place = fluid.CPUPlace() if args.device == 'CPU' else fluid.GPUPlace(0)
    exe = fluid.Executor(place)
    exe.run(fluid.default_startup_program())

    for it, pass_id in enumerate(xrange(args.pass_num)):
        accuracy.reset(exe)
        # BUG FIX: the original tested `iter == args.iterations`, comparing the
        # builtin function `iter` to an int — always False, so the early exit
        # could never trigger. The loop counter `it` is what was intended.
        if it == args.iterations:
            break
        for batch in train_reader():
            # Renamed from `data` to avoid shadowing the graph input above.
            tensor_words = to_lodtensor(map(lambda x: x[0], batch), place)

            # Renamed from `label` to avoid shadowing the label layer above.
            label_np = np.array(map(lambda x: x[1], batch)).astype("int64")
            label_np = label_np.reshape([args.batch_size, 1])

            tensor_label = fluid.LoDTensor()
            tensor_label.set(label_np, place)

            loss, acc = exe.run(fluid.default_main_program(),
                                feed={
                                    "words": tensor_words,
                                    "label": tensor_label
                                },
                                fetch_list=[avg_cost] + accuracy.metrics)
            pass_acc = accuracy.eval(exe)
            print("Iter: %d, loss: %s, acc: %s, pass_acc: %s" %
                  (it, str(loss), str(acc), str(pass_acc)))
Example #2
0
def run_benchmark(model, args):
    """Train `model` on fixed-length IMDB batches and print per-batch metrics.

    Args:
        model: callable(data_var, dict_dim) -> prediction variable.
        args: parsed CLI namespace; reads use_cprof, seq_len, batch_size,
            device, pass_num and iterations.
    """
    if args.use_cprof:
        pr = cProfile.Profile()
        pr.enable()
    start_time = time.time()
    word_dict = paddle.dataset.imdb.word_dict()

    print("load word dict successfully")

    dict_dim = len(word_dict)

    # The inputs carry an explicit batch dimension, hence
    # append_batch_size=False on both layers.
    data = fluid.layers.data(name="words",
                             shape=[args.seq_len * args.batch_size, 1],
                             append_batch_size=False,
                             dtype="int64",
                             lod_level=1)
    label = fluid.layers.data(name="label",
                              shape=[args.batch_size, 1],
                              append_batch_size=False,
                              dtype="int64")

    prediction = model(data, dict_dim)
    cost = fluid.layers.cross_entropy(input=prediction, label=label)
    avg_cost = fluid.layers.mean(x=cost)
    fluid.optimizer.Adam(learning_rate=0.002).minimize(avg_cost)
    accuracy = fluid.evaluator.Accuracy(input=prediction, label=label)

    shuffled = paddle.reader.shuffle(paddle.dataset.imdb.train(word_dict),
                                     buf_size=25000)  # only for speed
    train_reader = paddle.batch(shuffled, batch_size=args.batch_size)

    place = fluid.CPUPlace() if args.device == 'CPU' else fluid.GPUPlace(0)
    exe = fluid.Executor(place)
    exe.run(fluid.default_startup_program())

    for it, pass_id in enumerate(xrange(args.pass_num)):
        accuracy.reset(exe)
        if it == args.iterations:
            break
        for batch_id, batch in enumerate(train_reader()):
            # Chop every sequence to seq_len so the batch is rectangular.
            chopped = chop_data(batch,
                                chop_len=args.seq_len,
                                batch_size=args.batch_size)
            tensor_words, tensor_label = prepare_feed_data(chopped, place)

            fetched = exe.run(fluid.default_main_program(),
                              feed={
                                  "words": tensor_words,
                                  "label": tensor_label
                              },
                              fetch_list=[avg_cost] + accuracy.metrics)
            loss, acc = fetched
            pass_acc = accuracy.eval(exe)
            print("pass=%d, batch=%d, loss=%f, acc=%f, pass_acc=%f" %
                  (it, batch_id, loss, acc, pass_acc))
Example #3
0
    def test_nvprof(self):
        """Smoke-test the CUDA profiler context manager on a tiny conv net."""
        # Nothing to profile on CPU-only builds.
        if not fluid.core.is_compile_gpu():
            return

        num_steps = 8
        dshape = [4, 3, 28, 28]
        data = layers.data(name='data', shape=[3, 28, 28], dtype='float32')
        conv = layers.conv2d(data, 20, 3, stride=[1, 1], padding=[1, 1])

        place = fluid.GPUPlace(0)
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        # Run a few forward passes under the nvprof session; output lands in
        # cuda_profiler.txt in CSV format.
        with profiler.cuda_profiler("cuda_profiler.txt", 'csv') as nvprof:
            for _ in range(num_steps):
                feed_data = np.random.random(dshape).astype('float32')
                exe.run(fluid.default_main_program(),
                        feed={'data': feed_data})
Example #4
0
def run_benchmark(model, args):
    """Train `model` on the flowers dataset and print timing statistics.

    Args:
        model: callable(input_var, class_dim) -> prediction variable.
        args: parsed CLI namespace; reads use_cprof, order ('NCHW' or
            channels-last), batch_size, device, pass_num and iterations.
    """
    if args.use_cprof:
        pr = cProfile.Profile()
        pr.enable()
    start_time = time.time()

    class_dim = 102
    dshape = [3, 224, 224] if args.order == 'NCHW' else [224, 224, 3]
    input = fluid.layers.data(name='data', shape=dshape, dtype='float32')
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')
    predict = model(input, class_dim)
    cost = fluid.layers.cross_entropy(input=predict, label=label)
    avg_cost = fluid.layers.mean(x=cost)
    optimizer = fluid.optimizer.Momentum(learning_rate=0.01, momentum=0.9)
    opts = optimizer.minimize(avg_cost)
    accuracy = fluid.evaluator.Accuracy(input=predict, label=label)

    train_reader = paddle.batch(paddle.reader.shuffle(
        paddle.dataset.flowers.train(), buf_size=5120),
                                batch_size=args.batch_size)

    place = fluid.CPUPlace() if args.device == 'CPU' else fluid.GPUPlace(0)
    exe = fluid.Executor(place)
    exe.run(fluid.default_startup_program())

    # Global batch counter across passes; renamed from `iter`, which shadowed
    # the builtin of the same name.
    it = 0
    for pass_id in range(args.pass_num):
        accuracy.reset(exe)
        if it == args.iterations:
            break
        for data in train_reader():
            if it == args.iterations:
                break
            image = np.array(map(lambda x: x[0].reshape(dshape),
                                 data)).astype('float32')
            label = np.array(map(lambda x: x[1], data)).astype('int64')
            label = label.reshape([-1, 1])
            loss, acc = exe.run(fluid.default_main_program(),
                                feed={
                                    'data': image,
                                    'label': label
                                },
                                fetch_list=[avg_cost] + accuracy.metrics)
            pass_acc = accuracy.eval(exe)
            print("Iter: %d, loss: %s, acc: %s, pass_acc: %s" %
                  (it, str(loss), str(acc), str(pass_acc)))
            it += 1

    duration = time.time() - start_time
    examples_per_sec = args.iterations * args.batch_size / duration
    # BUG FIX: seconds-per-batch is total time divided by the number of
    # batches run (args.iterations), not by the batch size. This matches the
    # examples_per_sec computation above.
    sec_per_batch = duration / args.iterations

    print('\nTotal examples: %d, total time: %.5f' %
          (args.iterations * args.batch_size, duration))
    print('%.5f examples/sec, %.5f sec/batch \n' %
          (examples_per_sec, sec_per_batch))

    if args.use_cprof:
        pr.disable()
        s = StringIO.StringIO()
        sortby = 'cumulative'
        ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        ps.print_stats()
        print(s.getvalue())
# --- Top-level training-script fragment (n-gram word-prediction model) ---
# NOTE(review): `predict_word`, `next_word`, `avg_cost`, `word_dict`, `N`,
# `BATCH_SIZE` and the *_word input layers are defined earlier in the file,
# outside this excerpt — verify against the full script.
accuracy = fluid.evaluator.Accuracy(input=predict_word, label=next_word)

# Clone the main program before attaching the test-only evaluator, so the
# training program is left untouched.
inference_program = fluid.default_main_program().clone()

test_accuracy = fluid.evaluator.Accuracy(input=predict_word,
                                         label=next_word,
                                         main_program=inference_program)
# Prune the cloned program down to just what evaluation needs: the loss plus
# the accuracy metric and state variables.
test_target = [avg_cost] + test_accuracy.metrics + test_accuracy.states
inference_program = fluid.io.get_inference_program(
    test_target, main_program=inference_program)
train_reader = paddle.batch(paddle.dataset.imikolov.train(word_dict, N),
                            BATCH_SIZE)
test_reader = paddle.batch(paddle.dataset.imikolov.test(word_dict, N),
                           BATCH_SIZE)

place = fluid.GPUPlace(0)
exe = fluid.Executor(place)
# The feeder maps each (first..forth, next) word tuple onto the named input
# layers listed here, in order.
feeder = fluid.DataFeeder(
    feed_list=[first_word, second_word, third_word, forth_word, next_word],
    place=place)
exe.run(fluid.default_startup_program())
for pass_id in range(PASS_NUM):
    batch_id = 0
    accuracy.reset(exe)
    print("begin")
    print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
    time_begin = datetime.datetime.now()
    for data in train_reader():
        if batch_id % 100 == 0 and batch_id != 0:
            loss, acc = exe.run(fluid.default_main_program(),