import os
import sys

import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# n_steps, n_input, n_hidden, n_classes and rnn_model() are defined
# alongside this function in the surrounding training script.


def predict():
	"""Predict unseen images"""
	"""Step 0: load data and trained model"""
	mnist = input_data.read_data_sets("./data/", one_hot=True)
	checkpoint_dir = sys.argv[1]

	"""Step 1: build the rnn model"""
	x = tf.placeholder("float", [None, n_steps, n_input])
	y = tf.placeholder("float", [None, n_classes])

	weights = tf.Variable(tf.random_normal([n_hidden, n_classes]), name='weights')
	biases = tf.Variable(tf.random_normal([n_classes]), name='biases')

	pred = rnn_model(x, weights, biases)
	correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
	accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))

	"""Step 2: predict new images with the trained model"""
	with tf.Session() as sess:
		sess.run(tf.global_variables_initializer())
		"""Step 2.0: load the trained model"""
		checkpoint_file = tf.train.latest_checkpoint(os.path.join(checkpoint_dir, 'checkpoints'))
		print('Loading the trained model: {}'.format(checkpoint_file))

		saver = tf.train.Saver()
		saver.restore(sess, checkpoint_file)

		"""Step 2.1: predict new data"""
		test_len = 500
		test_data = mnist.test.images[:test_len].reshape((-1, n_steps, n_input))
		test_label = mnist.test.labels[:test_len]
		print("Testing Accuracy:", sess.run(accuracy, feed_dict={x: test_data, y: test_label}))
def test_train_nn(train_neural_network):
    """Check that train_neural_network() runs its training step through the supplied session."""
    mock_session = tf.Session()
    test_x = np.random.rand(128, 32, 32, 3)
    test_y = np.random.rand(128, 10)
    test_k = np.random.rand(1)
    test_optimizer = tf.train.AdamOptimizer()

    mock_session.run = MagicMock()
    train_neural_network(mock_session, test_optimizer, test_k, test_x, test_y)

    assert mock_session.run.called, 'Session not used'

    # _print_success_message() is a helper defined elsewhere in the test module.
    _print_success_message()
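
For context, an implementation of train_neural_network that satisfies this test only needs to push a single optimization step through the supplied session; the placeholder names x, y and keep_prob below are assumptions about the surrounding graph, not something defined by the test itself.

def train_neural_network(session, optimizer, keep_probability, feature_batch, label_batch):
    # Run one optimization step; the loss value is not fetched here.
    # x, y and keep_prob are assumed to be the graph's input placeholders.
    session.run(optimizer, feed_dict={
        x: feature_batch,
        y: label_batch,
        keep_prob: keep_probability})
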
Example #3
  name = sys.argv[1]
  output_filename = sys.argv[2]

  print("Running experiment: {}.".format(name))
  config = util.get_config("experiments.conf")[name]
  config["log_dir"] = util.mkdirs(os.path.join(config["log_root"], name))

  util.print_config(config)
  model = cm.CorefModel(config)

  model.load_eval_data()

  saver = tf.train.Saver()
  log_dir = config["log_dir"]

  with tf.Session() as session:
    checkpoint_path = os.path.join(log_dir, "model.max.ckpt")
    saver.restore(session, checkpoint_path)

    with open(output_filename, "w") as f:
      for example_num, (tensorized_example, example) in enumerate(model.eval_data):
        feed_dict = {i: t for i, t in zip(model.input_tensors, tensorized_example)}
        _, _, _, mention_starts, mention_ends, antecedents, antecedent_scores, head_scores = session.run(model.predictions + [model.head_scores], feed_dict=feed_dict)
        predicted_antecedents = model.get_predicted_antecedents(antecedents, antecedent_scores)
        example["predicted_clusters"], _ = model.get_predicted_clusters(mention_starts, mention_ends, predicted_antecedents)
        example["top_spans"] = zip((int(i) for i in mention_starts), (int(i) for i in mention_ends))
        example["head_scores"] = head_scores.tolist()
        f.write(json.dumps(example))
        f.write("\n")
        if example_num % 100 == 0:
          print("Decoded {} examples.".format(example_num + 1))