Example #1
wd1_hist = tf.histogram_summary("weights dense 1", W_fc1)
wd2_hist = tf.histogram_summary("weights dense 2", W_fc2)
bc1_hist = tf.histogram_summary("biases conv 1", b_conv1)
bc2_hist = tf.histogram_summary("biases conv 2", b_conv2)
bd1_hist = tf.histogram_summary("biases dense 1", b_fc1)
bd2_hist = tf.histogram_summary("biases dense 2", b_fc2)
y_hist = tf.histogram_summary("predictions", y)
ce_summ = tf.scalar_summary("cost", cross_entropy)
accuracy_summary = tf.scalar_summary("accuracy", accuracy)
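# merge_all_summaries() collects every summary op defined above into a single op,
# so one sess.run of `merged` evaluates all of them at once.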
merged = tf.merge_all_summaries()

with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    writer = tf.train.SummaryWriter("/tmp/mnist_logs", sess.graph)
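    # the event files written here can be inspected with: tensorboard --logdir=/tmp/mnist_logs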
    for i in range(15000+1):
        batch = mnist.next_batch(50)
        if i % 100 == 0:
            # one run computes the merged summaries, accuracy and cross-entropy together
            summary_str, acc, ce = sess.run([merged, accuracy, cross_entropy],
                                            feed_dict={x: batch[0], y_: batch[1], keep_prob: 1.0})
            print('[Step', str(i) + '] TRAIN error:', 1 - acc, '(Crossentropy:', ce, ')')
            writer.add_summary(summary_str, i)
        if i % 1000 == 0:
            # keep the test data in its own variable so the training step below
            # still uses the training batch
            test_batch = mnist.next_test_batch(600)
            test_acc, test_ce = sess.run([accuracy, cross_entropy],
                                         feed_dict={x: test_batch[0], y_: test_batch[1], keep_prob: 1.0})
            print('TEST error:', 1 - test_acc, '(Crossentropy:', test_ce, ')')
        train_step.run(feed_dict={x: batch[0], y_: batch[1], keep_prob: keep_prob_})

    # save the model
    save_path = saver.save(sess, "/tmp/mnist_statefarm.ckpt")
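
The summary and initialization calls above (tf.histogram_summary, tf.scalar_summary, tf.merge_all_summaries, tf.initialize_all_variables, tf.train.SummaryWriter) belong to the pre-1.0 TensorFlow API. A minimal equivalent sketch under the TF 1.x names, assuming the same tensors (W_fc1, cross_entropy, accuracy) are already defined:

wd1_hist = tf.summary.histogram("weights_dense_1", W_fc1)   # was tf.histogram_summary
ce_summ = tf.summary.scalar("cost", cross_entropy)          # was tf.scalar_summary
accuracy_summary = tf.summary.scalar("accuracy", accuracy)
merged = tf.summary.merge_all()                             # was tf.merge_all_summaries

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())             # was tf.initialize_all_variables
    writer = tf.summary.FileWriter("/tmp/mnist_logs", sess.graph)  # was tf.train.SummaryWriter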

Example #2

out_hist = tf.histogram_summary("weights output", W_fc8)
bc1_hist = tf.histogram_summary("biases conv 1", b_conv1)
bc2_hist = tf.histogram_summary("biases conv 2", b_conv2)
bc3_hist = tf.histogram_summary("biases conv 3", b_conv3)
bc4_hist = tf.histogram_summary("biases conv 4", b_conv4)
bc5_hist = tf.histogram_summary("biases conv 5", b_conv5)
y_hist = tf.histogram_summary("predictions", y)
ce_summ = tf.scalar_summary("cost", cross_entropy)
accuracy_summary = tf.scalar_summary("accuracy", accuracy)
merged = tf.merge_all_summaries()

with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    writer = tf.train.SummaryWriter("/tmp/alexnet_logs", sess.graph)
    for i in range(15000+1):
        batch = mnist.next_batch(batch_size)
        if i % 100 == 0:
            # one run computes the merged summaries, accuracy and cross-entropy together
            summary_str, acc, ce = sess.run([merged, accuracy, cross_entropy],
                                            feed_dict={x: batch[0], y_: batch[1], keep_prob: 1.0})
            print('[Step', str(i) + '] TRAIN error:', 1 - acc, '(Crossentropy:', ce, ')')
            writer.add_summary(summary_str, i)
        if i % 1000 == 0:
            # keep the test data in its own variable so the training step below
            # still uses the training batch
            test_batch = mnist.next_test_batch(batch_size)
            test_acc, test_ce = sess.run([accuracy, cross_entropy],
                                         feed_dict={x: test_batch[0], y_: test_batch[1], keep_prob: 1.0})
            print('TEST error:', 1 - test_acc, '(Crossentropy:', test_ce, ')')
        train_step.run(feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5})

    print('Making predictions on test set...')
    predictions_ = np.empty((0, 10))
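    # Hedged sketch of one way the prediction loop could continue: accumulate
    # per-batch class scores from `y` into predictions_. The number of test
    # batches is not shown in this snippet, so n_test_batches is a hypothetical placeholder.
    n_test_batches = 100  # hypothetical placeholder
    for _ in range(n_test_batches):
        test_batch = mnist.next_test_batch(batch_size)
        batch_preds = y.eval(feed_dict={x: test_batch[0], keep_prob: 1.0})
        predictions_ = np.vstack([predictions_, batch_preds])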