Example #1
def main(args):
    main_time = Timer()
    dataset = mnist.get_dataset(args.dataset)
    mnist.gen_model(args.model, args.loss)

    print('step, worker, samples, time, learnrate, batchsize, trainaccuracy, testaccuracy, validation')
    sgdr(dataset, args.popsize, args.epochs, args.learnrate, args.epochmult, args.epochmin, args.opt, args.workerid)
    print('# total time %3.1f' % main_time.elapsed())
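Judging by its name and the epochmult/epochmin arguments, sgdr appears to implement SGDR, stochastic gradient descent with warm restarts (Loshchilov & Hutter, 2017), where the learning rate is cosine-annealed within a cycle and the cycle length grows after each restart. A minimal sketch of that schedule, assuming a multiplicative period and a floor learning rate; the function name and defaults below are illustrative, not taken from the source:

import math

def sgdr_learnrate(epoch, lr_max, lr_min=0.0, period=10.0, period_mult=2.0):
    # Cosine-anneal from lr_max down to lr_min within each cycle,
    # then restart with a cycle `period_mult` times longer (assumed SGDR schedule).
    while epoch >= period:
        epoch -= period
        period *= period_mult
    return lr_min + 0.5 * (lr_max - lr_min) * (1.0 + math.cos(math.pi * epoch / period))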
Example #2
def main(args):
    main_time = Timer()
    dataset = mnist.get_dataset(args.dataset)
    mnist.gen_model(args.model, args.loss)

    print(
        'step, worker, samples, time, loops, learnrate, batchsize, trainaccuracy, testaccuracy, validation'
    )
    workers = build_workers(args.popsize, [hp.resample_learnrate],
                            [hp.perturb_learnrate])
    train_workers(dataset, workers, args.epochs, args.steps, args.cutoff,
                  args.opt)
    print('# total time %3.1f' % main_time.elapsed())
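The build_workers call above pairs each hyperparameter with a resample function and a perturb function, which is the explore step of population based training. A minimal sketch of what hp.resample_learnrate and hp.perturb_learnrate could look like, assuming a log-uniform resample range and a multiplicative perturbation; both the range and the factors are assumptions, not confirmed by the source:

import random

def resample_learnrate():
    # Draw a fresh learning rate log-uniformly from an assumed range [1e-6, 1e-1].
    return 10.0 ** random.uniform(-6, -1)

def perturb_learnrate(learnrate):
    # Scale the inherited value up or down by a random factor,
    # as in the explore step of population based training.
    return learnrate * random.choice([0.8, 1.25])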
Example #3
def main(args):
    main_time = Timer()
    dataset = mnist.get_dataset(args.dataset)
    mnist.gen_model(args.model, args.loss)

    print(
        'step, worker, samples, time, loops, learnrate, batchsize, trainaccuracy, testaccuracy, validation'
    )

    search_grid_epochs(dataset, args.steps, args.learnrate, args.opt,
                       args.workerid)
    #search_grid(dataset, args.popsize, args.train_time, args.steps)
    #multi_random(dataset, args.popsize, args.train_time, args.steps)

    print('# total time %3.1f' % main_time.elapsed())
Example #4
def feed_dict():
    dataset = mnist.get_dataset('fashion')
    x, y_, train_step, learning_rate, accuracy = mnist.gen_model('conv_dropout_model', 'softmax')

    with tf.Session() as sess:
        print("feed_dict")
        sess.run(tf.global_variables_initializer())
        # Time one pass over a quarter of the training set at each batch size.
        datasize = len(dataset.train.labels) // 4
        for batch_size in range(100, 3000, 100):  # optionally prepend [1, 2, 4, 8, 16, 32, 64]
            epoch_time = Timer()
            iterations = datasize // batch_size
            for _ in range(iterations):
                batch_xs, batch_ys = dataset.train.next_batch(batch_size)
                sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
            split = epoch_time.split()
            # Report batch size, steps run, wall time, and throughput in samples/sec.
            print('%d, %d, %3.1fs, %d/s' % (batch_size, iterations, split, datasize // split))
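All four examples depend on a Timer helper (with elapsed() and split() methods) that is defined elsewhere in the module, alongside the mnist and hp imports. A minimal stopwatch-style sketch consistent with how it is used above; the actual implementation in the source may differ:

import time

class Timer:
    # Wall-clock stopwatch matching the elapsed()/split() usage above (assumed).
    def __init__(self):
        self.start = time.time()
        self.last = self.start

    def elapsed(self):
        # Seconds since construction.
        return time.time() - self.start

    def split(self):
        # Seconds since the previous split (or construction), like a stopwatch lap.
        now = time.time()
        lap = now - self.last
        self.last = now
        return lap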