Example #1
def load_mnist(data_dir):
    dataset = dict()
    dataset['training_set'] = dict()
    dataset['validation_set'] = dict()
    dataset['test_set'] = dict()

    mnist = load_datasets(data_dir, normalize=True, one_hot=True)

    # reshape the inputs
    set_size = mnist.train.images.shape[0]
    test_set_size = mnist.test.images.shape[0]
    dataset['training_set']['x'] = mnist.train.images.reshape(
        set_size, 28 * 28)
    dataset['test_set']['x'] = mnist.test.images.reshape(
        test_set_size, 28 * 28)

    # hold out one sixth of the original training set for validation
    validation_set_size = set_size // 6
    training_set_size = set_size - validation_set_size
    dataset['validation_set']['x'] = dataset['training_set']['x'][
        0:validation_set_size]
    dataset['validation_set']['y'] = mnist.train.labels[0:validation_set_size]
    dataset['training_set']['x'] = dataset['training_set']['x'][
        validation_set_size:set_size]
    dataset['training_set']['y'] = mnist.train.labels[
        validation_set_size:set_size]
    dataset['test_set']['y'] = mnist.test.labels

    return dataset
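
The helper above returns a plain dict of NumPy arrays, so its output can be fed to any framework. A minimal usage sketch (the data directory path is a placeholder assumption):

dataset = load_mnist('./mnist-data')
x_train = dataset['training_set']['x']  # flattened 28*28 images
y_train = dataset['training_set']['y']  # one-hot labels

# iterate over mini-batches of 128 samples
batch_size = 128
for i in range(0, len(x_train), batch_size):
    xb = x_train[i:i + batch_size]
    yb = y_train[i:i + batch_size]
    # run one training step on (xb, yb) here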
Example #2
def __init__(self, optimizer, batch_size, n_epochs, data_dir, shards):
    # note: the shards argument is accepted but not used in this snippet
    self._batch_size = batch_size
    self._n_epochs = n_epochs
    # build_ops must create all graph variables before they are initialized
    self._ops = build_ops(optimizer)
    self._dataset = load_datasets(data_dir, normalize=True, one_hot=True)
    self._sess = tf.Session()
    self._sess.run(tf.global_variables_initializer())
Example #3
def main(do_eval=True):
    args = parse_args()
    model_dir = get_model_dir(args)

    data = load_datasets(args.data_dir, normalize=True)
    classifier = tf.estimator.Estimator(model_fn, model_dir=model_dir)

    from kungfu.tensorflow.experimental.hook import ElasticHook
    hooks = [ElasticHook(args.batch_size, args.num_epochs, MNIST_DATA_SIZE)]

    classifier.train(input_fn(data.train,
                              args.batch_size,
                              epochs=args.num_epochs),
                     hooks=hooks)

    if not do_eval:
        import time
        time.sleep(1)
        return
    results = classifier.evaluate(input_fn(data.test,
                                           args.batch_size,
                                           shuffle=False),
                                  hooks=[],
                                  steps=1)
    print('results: %s' % (results, ))
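
Examples #3, #5 and #6 all call an input_fn helper that is defined elsewhere in the project. A plausible reconstruction, inferred from the call sites and the tf.estimator contract rather than taken from the project's actual code, could look like this:

import tensorflow as tf

def input_fn(ds, batch_size, epochs=1, shuffle=True):
    # returns a zero-argument callable, as tf.estimator expects
    def fn():
        images = ds.images.reshape([-1, 28, 28, 1])
        labels = tf.cast(ds.labels, tf.int32)
        data = tf.data.Dataset.from_tensor_slices((images, labels))
        if shuffle:
            data = data.shuffle(10000)
        return data.batch(batch_size).repeat(epochs)
    return fn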
Example #4
File: main.py  Project: zuston/KungFu
def build_dataset(args):
    data = load_datasets(args.data_dir, normalize=True)
    samples = data.train.images.reshape([-1, 28, 28, 1])
    labels = tf.cast(data.train.labels, tf.int32)
    samples = tf.data.Dataset.from_tensor_slices(samples)
    labels = tf.data.Dataset.from_tensor_slices(labels)
    ds = tf.data.Dataset.zip((samples, labels))
    ds = ds.shuffle(10000)  # shuffle individual samples before batching
    ds = ds.batch(args.batch_size)
    ds = ds.repeat()  # repeat indefinitely
    return ds
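
Since the pipeline above is built with TF1-style graph ops (tf.cast runs outside any session), here is a short consumption sketch under that assumption:

ds = build_dataset(args)
samples, labels = ds.make_one_shot_iterator().get_next()

with tf.Session() as sess:
    xb, yb = sess.run([samples, labels])  # fetch one mini-batch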
Example #5
def main():
    args = parse_args()
    print('using config: %s, max step=%d' % (args.schedule, args.max_step))
    model_dir = get_model_dir(args)

    data = load_datasets(args.data_dir, normalize=True)
    classifier = tf.estimator.Estimator(model_fn, model_dir=model_dir)

    classifier.train(input_fn(data.train, 1000),
                     hooks=[
                         KungFuElasticTrainHook(args.schedule, args.max_step,
                                                model_dir)
                     ],
                     max_steps=args.max_step)

    results = classifier.evaluate(input_fn(data.test, 1000, shuffle=False),
                                  hooks=[],
                                  steps=1)
    print('results: %s' % (results, ))
Example #6
def main(do_eval=True):
    args = parse_args()
    model_dir = get_model_dir(args)

    data = load_datasets(args.data_dir, normalize=True)
    classifier = tf.estimator.Estimator(model_fn, model_dir=model_dir)

    # map a training step to the desired cluster size at that step
    step_schedule = {
        10: 2,
        20: 3,
        30: 4,
        40: 1,
        50: 1,
    }

    policy = ScheduledElasticPolicy(step_schedule)
    hooks = [
        PolicyHook([policy], MNIST_DATA_SIZE, args.num_epochs,
                   args.batch_size),
    ]

    classifier.train(input_fn(data.train,
                              args.batch_size,
                              epochs=args.num_epochs),
                     hooks=hooks)

    if not do_eval:
        import time
        time.sleep(1)
        return
    results = classifier.evaluate(input_fn(data.test,
                                           args.batch_size,
                                           shuffle=False),
                                  hooks=[],
                                  steps=1)
    print('results: %s' % (results, ))
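
In the last example, step_schedule maps a global training step to a target cluster size: the ScheduledElasticPolicy scales the cluster up to 2, 3 and 4 workers at steps 10, 20 and 30, then shrinks it back to a single worker from step 40 onward.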