# Top-level training script (TensorFlow 1.x graph/session API).
# Loads train/test image datasets, builds a CNN graph, then runs a
# mini-batch training loop. NOTE(review): this chunk is truncated —
# the inner batch loop continues past the end of what is visible here.

# Load and shuffle the two datasets. `fetch_images_and_labels` and
# `shaffle_images_and_labels` are defined elsewhere in this file.
# NOTE(review): "shaffle" looks like a typo for "shuffle" — if so, fix
# it at the definition site and all call sites together, not here.
train_images, train_labels = fetch_images_and_labels(TRAIN_DIR)
train_images, train_labels = shaffle_images_and_labels(train_images, train_labels)
test_images, test_labels = fetch_images_and_labels(TEST_DIR)
test_images, test_labels = shaffle_images_and_labels(test_images, test_labels)

# Model wrapper; CNN is a project-local class (defined elsewhere).
cnn = CNN(image_size=FLAGS.image_size, class_count=len(CLASSES))

# Build the computation graph in a fresh default graph (TF1 style).
with tf.Graph().as_default():
    # Placeholders: flattened image pixels, one-hot labels, and the
    # dropout keep-probability fed at run time.
    # assumes PIXEL_COUNT == image_size * image_size * channels — TODO confirm
    x = tf.placeholder(tf.float32, [None, PIXEL_COUNT])
    labels = tf.placeholder(tf.float32, [None, len(CLASSES)])
    keep_prob = tf.placeholder(tf.float32)

    # Wire up forward pass, loss, optimizer step, and accuracy metric.
    y = cnn.inference(x, keep_prob)
    v = cnn.cross_entropy(y, labels)
    train_step = cnn.train_step(v, FLAGS.learning_rate)
    accuracy = cnn.accuracy(y, labels)

    saver = tf.train.Saver()                   # for checkpointing the model
    init = tf.global_variables_initializer()

    with tf.Session() as sess:
        sess.run(init)

        # TensorBoard summaries: merge all summary ops registered during
        # graph construction and write them (plus the graph) to LOG_DIR.
        summary_op = tf.summary.merge_all()
        summary_writer = tf.summary.FileWriter(LOG_DIR, sess.graph)

        # Training loop: FLAGS.step_count epochs over the training set,
        # split into batches of FLAGS.batch_size (trailing partial batch
        # is dropped by the int() floor division).
        for i in range(FLAGS.step_count):
            for j in range(int(len(train_images) / FLAGS.batch_size)):
                # Offset of the current batch within the training arrays.
                # NOTE(review): loop body continues beyond this chunk —
                # presumably slices [batch:batch+FLAGS.batch_size] and
                # runs train_step; verify against the rest of the file.
                batch = FLAGS.batch_size * j