Example #1
def main(args):
    main_time = Timer()
    dataset = mnist.get_dataset(args.dataset)
    mnist.gen_model(args.model, args.loss)

    print('step, worker, samples, time, loops, learnrate, batchsize, trainaccuracy, testaccuracy, validation')
    sgdr(dataset, args.popsize, args.epochs, args.learnrate, args.epochmult, args.epochmin, args.opt, args.workerid)
    print('# total time %3.1f' % main_time.elapsed())
Example #2
def main(args):
    main_time = Timer()
    dataset = mnist.get_dataset(args.dataset)
    mnist.gen_model(args.model, args.loss)

    print(
        'step, worker, samples, time, loops, learnrate, batchsize, trainaccuracy, testaccuracy, validation'
    )
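    # Build a population of workers with resample/perturb rules for the
    # learning rate (population-based-training style), then train them.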
    workers = build_workers(args.popsize, [hp.resample_learnrate],
                            [hp.perturb_learnrate])
    train_workers(dataset, workers, args.epochs, args.steps, args.cutoff,
                  args.opt)
    print('# total time %3.1f' % main_time.elapsed())
Example #3
def main(args):
    main_time = Timer()
    dataset = mnist.get_dataset(args.dataset)
    mnist.gen_model(args.model, args.loss)

    print(
        'step, worker, samples, time, loops, learnrate, batchsize, trainaccuracy, testaccuracy, validation'
    )

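    # Grid-search over epochs at a fixed learning rate; alternative
    # search strategies are kept commented out below for reference.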
    search_grid_epochs(dataset, args.steps, args.learnrate, args.opt,
                       args.workerid)
    #search_grid(dataset, args.popsize, args.train_time, args.steps)
    #multi_random(dataset, args.popsize, args.train_time, args.steps)

    print('# total time %3.1f' % main_time.elapsed())
Example #4
def feed_dict():
    dataset = mnist.get_dataset('fashion')
    x, y_, train_step, learning_rate, accuracy = mnist.gen_model('conv_dropout_model', 'softmax')

    with tf.Session() as sess:
        print("feed_dict")
        sess.run(tf.global_variables_initializer())
        datasize = len(dataset.train.labels) // 4
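        # Time one pass over datasize samples at each batch size to measure training throughput.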
        for batch_size in range(100, 3000, 100):  # smaller sizes [1, 2, 4, 8, 16, 32, 64] were dropped from this sweep
            epoch_time = Timer()
            iterations = datasize // batch_size
            for _ in range(iterations):
                batch_xs, batch_ys = dataset.train.next_batch(batch_size)
                sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
            split = epoch_time.split()
            print('%d, %d, %3.1fs, %d/s' % (batch_size, iterations, split, datasize // split))
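Both `elapsed()` and `split()` come from the local `timer` module these examples import. A minimal sketch of a Timer compatible with the calls above (an assumed interface, not the repository's actual implementation):

import time

class Timer:
    """Wall-clock timer tracking a running total and per-split intervals."""

    def __init__(self):
        self.start = time.time()
        self.last = self.start

    def elapsed(self):
        # Seconds since the timer was created.
        return time.time() - self.start

    def split(self):
        # Seconds since the previous split (or creation), resetting the split point.
        now = time.time()
        delta = now - self.last
        self.last = now
        return delta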
Example #5
from __future__ import print_function

import argparse
from importlib import import_module
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import mnist
from timer import Timer


MNIST = mnist.get_dataset('fashion')


def main(args):
    # augment()
    # feed_dict()
    # datasets(args.model, args.loss, args.batch_size)
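    # Sweep batch sizes upward from args.batch_size, resetting the default graph between trials.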
    for batch_size in [1, 2, 4, 8, 16, 32, 64] + list(range(100, 3000, 100)):
        if batch_size >= args.batch_size:
            tf.reset_default_graph()
            datasets(args.model, args.loss, batch_size, args.trials)


def augment():
    test_dataset = tf.data.Dataset.from_tensor_slices((MNIST.test.images, MNIST.test.labels))
    dataset = sample_pair_dataset()

    test_iterator = test_dataset.make_one_shot_iterator()
    test_example, test_label = test_iterator.get_next()