            pre_ind.append(i)
            post_ind.append(n)
            weights.append(ip_weights[i, n])


def create_ipb_connections(ip_n, ip_bias, pre_ind, post_ind, weights):
    # Bias connections: source neuron 0 projects to every neuron i of the
    # layer with weight ip_bias[i].
    for i in range(ip_n):
        pre_ind.append(0)
        post_ind.append(i)
        weights.append(ip_bias[i])


#------------------------------------------------------------------------------
# load CIFAR10
#------------------------------------------------------------------------------
cifar10_data_set = cifar10_extract.Cifar10DataSet('../dataset/')
test_images, test_labels = cifar10_data_set.test_data()

# crop to 24x24 and rescale pixel values to [0, 255]
test_images = test_images[:, 4:28, 4:28, :] * 255

#------------------------------------------------------------------------------
# set parameters and equations
#------------------------------------------------------------------------------
# Input neurons: v grows at a constant rate and crosses the threshold every
# 1/rates seconds, so each input neuron fires regular spikes at 'rates'.
eqs_input = '''rates : Hz
dv/dt = 1 : second'''
input_thresh = 'v > 1/rates'
input_reset = 'v -= 1/rates'

eqs_conv1 = 'v : 1'
eqs_pool1 = 'v : 1'
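
# --- Illustrative sketch (assumption, not part of the original script) --------
# With the standard Brian2 API, the input equations above could be used to
# build a NeuronGroup whose neurons fire regularly at rates set from the pixel
# intensities. The names n_input and input_group are placeholders introduced
# here for illustration only.
#
#   from brian2 import NeuronGroup, Hz
#   n_input = 24 * 24 * 3                       # one neuron per cropped RGB pixel
#   input_group = NeuronGroup(n_input, eqs_input,
#                             threshold=input_thresh, reset=input_reset,
#                             method='euler')
#   input_group.rates = test_images[0].flatten() * Hz   # intensity -> firing rate
#   # (pixels with rate 0 Hz never reach threshold and stay silent)
# -------------------------------------------------------------------------------
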
def evaluate():
    """Eval CIFAR-10 for a number of steps."""
    with tf.Graph().as_default() as g:
        # Get images and labels for CIFAR-10.
        # eval_data = FLAGS.eval_data == 'test'
        # images, labels = cifar10.inputs(eval_data=eval_data)
        images = tf.placeholder('float', [FLAGS.batch_size, 24, 24, 3])
        labels = tf.placeholder('int32', [FLAGS.batch_size, 10])

        # Build a Graph that computes the logits predictions from the
        # inference model.
        logits = cifar10.inference(images)
        # lp = tf.argmax(logits, 1)

        # count correct prediction
        correct_prediction = tf.equal(tf.argmax(logits, 1),
                                      tf.argmax(labels, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))

        # Calculate predictions.
        # top_k_op = tf.nn.in_top_k(logits, labels, 1)

        # Restore the moving average version of the learned variables for eval.
        variable_averages = tf.train.ExponentialMovingAverage(
            cifar10.MOVING_AVERAGE_DECAY)
        variables_to_restore = variable_averages.variables_to_restore()
        saver = tf.train.Saver(variables_to_restore)
        # eval_once(saver, summary_writer, top_k_op, summary_op)

        # get cifar-10 dataset
        cifar10_data_set = cifar10_extract.Cifar10DataSet('../dataset/')
        test_images, test_labels = cifar10_data_set.test_data()
        # crop
        test_images = test_images[:, 4:28, 4:28, :] - 0.5

        with tf.Session() as sess:
            ckpt = tf.train.get_checkpoint_state(FLAGS.checkpoint_dir)
            if ckpt and ckpt.model_checkpoint_path:
                # Restores from checkpoint
                saver.restore(sess, ckpt.model_checkpoint_path)
            else:
                print('No checkpoint file found')
                return

            # avg = 0
            # for i in xrange(100):
            #     batch_xs = test_images[
            #         i * FLAGS.batch_size:(i + 1) * FLAGS.batch_size]
            #     batch_ys = test_labels[
            #         i * FLAGS.batch_size:(i + 1) * FLAGS.batch_size]
            #     avg += sess.run(accuracy, feed_dict={images:
            #                     batch_xs, labels: batch_ys})
            # avg /= (100)
            # print("test accuracy %g" % avg)

            # Get conv1 output
            conv1 = tf.get_default_graph().get_tensor_by_name('conv1/conv1:0')
            cnn_conv1 = sess.run(conv1, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_conv1 = cnn_conv1.reshape((FLAGS.batch_size, 24 * 24 * 64))

            # Get pool1 output
            pool1 = tf.get_default_graph().get_tensor_by_name('pool1:0')
            cnn_pool1 = sess.run(pool1, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_pool1 = cnn_pool1.reshape((FLAGS.batch_size, 12 * 12 * 64))

            # Get conv2 output
            conv2 = tf.get_default_graph().get_tensor_by_name('conv2/conv2:0')
            cnn_conv2 = sess.run(conv2, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_conv2 = cnn_conv2.reshape((FLAGS.batch_size, 12 * 12 * 64))

            # Get pool2 output
            pool2 = tf.get_default_graph().get_tensor_by_name('pool2:0')
            cnn_pool2 = sess.run(pool2, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_pool2 = cnn_pool2.reshape((FLAGS.batch_size, 6 * 6 * 64))

            # Get local3 output
            local3 = tf.get_default_graph().get_tensor_by_name(
                'local3/local3:0')
            cnn_local3 = sess.run(local3, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_local3 = cnn_local3.reshape((FLAGS.batch_size, 384))

            # Get local4 output
            local4 = tf.get_default_graph().get_tensor_by_name(
                'local4/local4:0')
            cnn_local4 = sess.run(local4, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_local4 = cnn_local4.reshape((FLAGS.batch_size, 192))

            # Get softmax_linear output
            local5 = tf.get_default_graph().get_tensor_by_name(
                'softmax_linear/softmax_linear:0')
            cnn_local5 = sess.run(local5, feed_dict={
                images: test_images[0:FLAGS.batch_size],
                labels: test_labels[0:FLAGS.batch_size]
            })
            cnn_local5 = cnn_local5.reshape((FLAGS.batch_size, 10))

            sio.savemat(
                'output/cifar10_cnn.mat', {
                    'conv1': cnn_conv1,
                    'pool1': cnn_pool1,
                    'conv2': cnn_conv2,
                    'pool2': cnn_pool2,
                    'ip1': cnn_local3,
                    'ip2': cnn_local4,
                    'ip3': cnn_local5
                })

            # Save weights for brian2
            with tf.variable_scope('conv1', reuse=True):
                w_conv1 = tf.get_variable("weights")
                b_conv1 = tf.get_variable("biases")
            with tf.variable_scope('conv2', reuse=True):
                w_conv2 = tf.get_variable("weights")
                b_conv2 = tf.get_variable("biases")
            with tf.variable_scope('local3', reuse=True):
                w_local3 = tf.get_variable("weights")
                b_local3 = tf.get_variable("biases")
            with tf.variable_scope('local4', reuse=True):
                w_local4 = tf.get_variable("weights")
                b_local4 = tf.get_variable("biases")
            with tf.variable_scope('softmax_linear', reuse=True):
                w_local5 = tf.get_variable("weights")
                b_local5 = tf.get_variable("biases")

            if not tf.gfile.Exists('output/weights'):
                tf.gfile.MakeDirs('output/weights')

            save_w_conv1 = sess.run(w_conv1)
            save_b_conv1 = sess.run(b_conv1)
            save_w_conv2 = sess.run(w_conv2)
            save_b_conv2 = sess.run(b_conv2)
            save_w_local3 = sess.run(w_local3)
            save_b_local3 = sess.run(b_local3)
            save_w_local4 = sess.run(w_local4)
            save_b_local4 = sess.run(b_local4)
            save_w_local5 = sess.run(w_local5)
            save_b_local5 = sess.run(b_local5)

            sio.savemat(
                'output/weights/cifar10_weights.mat', {
                    'conv1_w': save_w_conv1,
                    'conv1_b': save_b_conv1,
                    'conv2_w': save_w_conv2,
                    'conv2_b': save_b_conv2,
                    'ip1_w': save_w_local3,
                    'ip1_b': save_b_local3,
                    'ip2_w': save_w_local4,
                    'ip2_b': save_b_local4,
                    'ip3_w': save_w_local5,
                    'ip3_b': save_b_local5
                })
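
# --- Illustrative usage sketch (assumption, not part of the original script) ---
# Assuming scipy.io is available as 'sio' (as used above), the exported weight
# file can be read back on the Brian2 side and fed to the connection helpers
# shown earlier in this listing, e.g. for the bias connections of the first
# fully-connected layer ('ip1', i.e. local3):
#
#   w = sio.loadmat('output/weights/cifar10_weights.mat')
#   ip1_b = w['ip1_b'].flatten()
#   pre_ind, post_ind, syn_w = [], [], []
#   create_ipb_connections(len(ip1_b), ip1_b, pre_ind, post_ind, syn_w)
#   # pre_ind / post_ind / syn_w can then be passed to a Brian2 Synapses object
#   # via connect(i=pre_ind, j=post_ind) and a per-synapse weight variable.
# --------------------------------------------------------------------------------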