Example #1
    # load data
    mnist = input_data.read_data_sets('data/mnist', one_hot=True)

    # Input (X) and target (Y) placeholders; they will be fed with a batch of
    # input and target values, respectively, from the training and test sets
    X = qfns.input_placeholder()
    Y = qfns.target_placeholder()

    # Create the tensorflow computational graph for our model
    if network == "onelayer":
        w, b, logits_op, preds_op, xentropy_op, loss_op = qfns.onelayer(X, Y)
        for v, name in zip((w, b), ("w", "b")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    elif network == "twolayer":
        w1, b1, w2, b2, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.twolayer(X, Y)
        for v, name in zip((w1, b1, w2, b2), ("w1", "b1", "w2", "b2")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    elif network == "conv":
        # standard conv layers
        conv1out, conv2out, w, b, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.convnet(tf.reshape(X, [-1, 28, 28, 1]), Y)
        for v, name in ((w, "w"), (b, "b")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    #elif network == "rollyourown":
    ## You can define your own conv net here and play around with it
    else:
        raise ValueError("Unrecognised network string: {}".format(network))
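
(The snippets above rely on helpers from a qfns module and on a variable_summaries function, none of which are shown. The sketch below guesses at minimal implementations, assuming the usual flattened-MNIST shapes of 784-float inputs and 10-way one-hot targets; the names come from the snippets, but the shapes and summary choices are assumptions, not confirmed by the source.)

    import tensorflow as tf

    def input_placeholder():
        # 28x28 MNIST images, flattened to 784 floats per example (assumed shape)
        return tf.placeholder(tf.float32, shape=[None, 784], name="image_input")

    def target_placeholder():
        # one-hot labels for the ten digit classes (assumed shape)
        return tf.placeholder(tf.float32, shape=[None, 10],
                              name="image_target_onehot")

    def variable_summaries(var, name):
        # Attach mean/stddev/min/max/histogram summaries to a variable,
        # mirroring the helper from the TensorFlow summaries tutorial.
        with tf.name_scope(name):
            mean = tf.reduce_mean(var)
            tf.summary.scalar('mean', mean)
            stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
            tf.summary.scalar('stddev', stddev)
            tf.summary.scalar('max', tf.reduce_max(var))
            tf.summary.scalar('min', tf.reduce_min(var))
            tf.summary.histogram('histogram', var)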
Example #2
import os
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import qfns  # assignment helper module providing the model-building functions


def running(config, budget):
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    tf.reset_default_graph()

    network = config["network"]
    print("sid {sid}, network: {network}".format(sid=sid, network=network))

    # hyperparameters
    learning_rate = config["lr"]
    batch_size = 256
    n_training_epochs = int(budget)

    # load data
    mnist = input_data.read_data_sets('data/mnist', one_hot=True)

    # Input (X) and target (Y) placeholders; they will be fed with a batch of
    # input and target values, respectively, from the training and test sets
    X = qfns.input_placeholder()
    Y = qfns.target_placeholder()

    # Create the tensorflow computational graph for our model
    if network == "onelayer":
        w, b, logits_op, preds_op, xentropy_op, loss_op = qfns.onelayer(X, Y)
        # [variable_summaries(v, name) for (v, name) in zip((w, b), ("w", "b"))]
        # tf.summary.histogram('pre_activations', logits_op)

    elif network == "twolayers":
        w1, b1, w2, b2, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.twolayer(X, Y, hiddensize=30, outputsize=10)
        # [variable_summaries(v, name) for (v, name) in
        # zip((w1, b1, w2, b2), ("w1", "b1", "w2", "b2"))]
        # tf.summary.histogram('pre_activations', logits_op)

    elif network == "conv":
        # standard conv layers
        conv1out, conv2out, w, b, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.convnet(tf.reshape(X, [-1, 28, 28, 1]), Y, convlayer_sizes=[10, 10],
                        filter_shape=[3, 3], outputsize=10, padding="same")
        # [variable_summaries(v, name) for (v, name) in ((w, "w"), (b, "b"))]
        # tf.summary.histogram('pre_activations', logits_op)
    else:
        raise ValueError("Unrecognised network string: {}".format(network))

    # The training op performs a step of stochastic gradient descent on a minibatch
    optimizer = tf.train.AdamOptimizer  # ADAM - widely used optimiser (ref: http://arxiv.org/abs/1412.6980)
    train_op = optimizer(learning_rate).minimize(loss_op)

    # Prediction and accuracy ops
    accuracy_op = get_accuracy_op(preds_op, Y)

    # TensorBoard for visualisation
    # Merge all the summaries and write them out to /tmp/mnist_logs (by default)
    summaries_op = tf.summary.merge_all()

    # Separate accuracy summary so we can use train and test sets
    accuracy_placeholder = tf.placeholder(shape=[], dtype=tf.float32)
    # accuracy_summary_op = tf.summary.scalar("accuracy", accuracy_placeholder)

    # When run, the init_op initialises any TensorFlow variables
    # hint: weights and biases in our case
    init_op = tf.global_variables_initializer()

    # Get started
    with tf.Session() as sess:
        sess.run(init_op)

        # Initialise TensorBoard Summary writers
        # dtstr = "{:%b_%d_%H-%M-%S}".format(datetime.now())
        # train_writer = tf.summary.FileWriter('./summaries/' + dtstr + '/train', sess.graph)
        # test_writer = tf.summary.FileWriter('./summaries/' + dtstr + '/test')

        # Train
        print('Starting Training...')
        train_accuracy, test_accuracy = train(sess, mnist, n_training_epochs,
                                              batch_size, X, Y, train_op,
                                              loss_op, accuracy_op,
                                              accuracy_placeholder)
        print('Training Complete\n')
        print("train_accuracy: {}, test_accuracy: {}".format(
            train_accuracy, test_accuracy))
        with open("results.csv", "a") as f:
            f.write("{0},{1},{2},{3}\n".format(sid, network, train_accuracy,
                                               test_accuracy))

    return train_accuracy, test_accuracy
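
(A note on invocation: the config/budget signature matches the worker interface of hyperparameter-search frameworks such as HpBandSter, which would normally supply both arguments. A minimal hand-rolled call might look like the sketch below; the config values and the sid definition are hypothetical.)

    sid = "demo-run"                          # used in the log line and the CSV
    config = {"network": "conv", "lr": 1e-3}  # keys read by running()
    train_acc, test_acc = running(config, budget=5)  # budget = training epochs
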
Example #3
    # load data
    mnist = input_data.read_data_sets('data/mnist', one_hot=True)

    # Input (X) and target (Y) placeholders; they will be fed with a batch of
    # input and target values, respectively, from the training and test sets
    X = qfns.input_placeholder()
    Y = qfns.target_placeholder()

    # Create the tensorflow computational graph for our model
    if network == "onelayer":
        w, b, logits_op, preds_op, xentropy_op, loss_op = qfns.onelayer(X, Y)
        for v, name in zip((w, b), ("w", "b")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)

    elif network == "twolayer":
        w1, b1, w2, b2, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.twolayer(X, Y, hiddensize=30, outputsize=10)
        for v, name in zip((w1, b1, w2, b2), ("w1", "b1", "w2", "b2")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)

    elif network == "conv":
        # standard conv layers
        conv1out, conv2out, w, b, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.convnet(tf.reshape(X, [-1, 28, 28, 1]), Y, convlayer_sizes=[10, 10],
                         filter_shape=[3, 3], outputsize=10, padding="same")
        for v, name in ((w, "w"), (b, "b")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    else:
        raise ValueError("Unrecognised network string: {}".format(network))

    # The training op performs a step of stochastic gradient descent on a minibatch
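
(Example #2 calls a train helper that none of these snippets define. The sketch below is one plausible implementation, assuming the standard input_data DataSet API; the epoch loop, loss reporting, and full-set evaluation are assumptions rather than the original code.)

    def train(sess, mnist, n_training_epochs, batch_size, X, Y,
              train_op, loss_op, accuracy_op, accuracy_placeholder):
        # One epoch = one full pass over the training set in minibatches.
        for epoch in range(n_training_epochs):
            total_loss = 0.0
            n_batches = mnist.train.num_examples // batch_size
            for _ in range(n_batches):
                batch_x, batch_y = mnist.train.next_batch(batch_size)
                _, loss = sess.run([train_op, loss_op],
                                   feed_dict={X: batch_x, Y: batch_y})
                total_loss += loss
            print("epoch {}: mean loss {:.4f}".format(epoch,
                                                      total_loss / n_batches))
        # accuracy_placeholder is only needed if the commented-out accuracy
        # summary op is re-enabled; it is unused in this sketch.
        train_accuracy = sess.run(accuracy_op,
                                  feed_dict={X: mnist.train.images,
                                             Y: mnist.train.labels})
        test_accuracy = sess.run(accuracy_op,
                                 feed_dict={X: mnist.test.images,
                                            Y: mnist.test.labels})
        return train_accuracy, test_accuracy
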
Example #4
    # load data
    mnist = input_data.read_data_sets('data/mnist', one_hot=True)

    # Input (X) and target (Y) placeholders; they will be fed with a batch of
    # input and target values, respectively, from the training and test sets
    X = qfns.input_placeholder()
    Y = qfns.target_placeholder()

    # Create the tensorflow computational graph for our model
    if network == "onelayer":
        w, b, logits_op, preds_op, xentropy_op, loss_op = qfns.onelayer(X, Y)
        for v, name in zip((w, b), ("w", "b")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    elif network == "twolayer":
        w1, b1, w2, b2, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.twolayer(X, Y)
        for v, name in zip((w1, b1, w2, b2), ("w1", "b1", "w2", "b2")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    elif network == "conv":
        # standard conv layers
        conv1out, conv2out, w, b, logits_op, preds_op, xentropy_op, loss_op = \
            qfns.convnet(tf.reshape(X, [-1, 28, 28, 1]), Y)
        for v, name in ((w, "w"), (b, "b")):
            variable_summaries(v, name)
        tf.summary.histogram('pre_activations', logits_op)
    #elif network == "rollyourown":
    ## You can define your own conv net here and play around with it
    else:
        raise ValueError("Unrecognised network string: {}".format(network))

    # The training op performs a step of stochastic gradient descent on a minibatch
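
(Examples #1, #3, and #4 all cut off at this comment; Example #2 shows how the graph is typically completed with an Adam training op and an accuracy op. The get_accuracy_op helper itself is never shown; the sketch below is a plausible implementation under the one-hot conventions used above, not the confirmed original.)

    import tensorflow as tf

    def get_accuracy_op(preds_op, Y):
        # Fraction of examples whose arg-max prediction matches the
        # arg-max of the one-hot target.
        correct = tf.equal(tf.argmax(preds_op, axis=1), tf.argmax(Y, axis=1))
        return tf.reduce_mean(tf.cast(correct, tf.float32))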