Example No. 1
 def test_loss(self):
   # Build the inference graph, attach the loss ops, then evaluate the
   # regularized loss on a feed of ones for both the input and the target.
   nnet, weightTerms = function_fit.inference(self.x, self.hidden1, self.hidden2)
   modError, modLoss = function_fit.loss(nnet, weightTerms, self.regLambda, self.y)
   init_op = tf.initialize_all_variables()
   with tf.Session() as sess:
     sess.run(init_op)
     print("loss=%r" % sess.run(modLoss, feed_dict={self.x: np.ones((3, 1), dtype=np.float32),
                                                    self.y: np.ones((3, 1), dtype=np.float32)}))
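The function_fit.loss helper exercised above is not part of this listing; the following is a minimal sketch of what a loss op with this signature might compute, assuming a mean-squared-error data term plus an L2 penalty over the weight tensors returned by inference(). The real implementation may differ.

import tensorflow as tf

def loss(nnet, weightTerms, regLambda, y):
    # Data term: mean squared difference between prediction and target.
    modError = tf.reduce_mean(tf.square(nnet - y))
    # Regularization term: L2 penalty summed over every weight tensor.
    regTerm = tf.add_n([tf.reduce_sum(tf.square(w)) for w in weightTerms])
    modLoss = modError + regLambda * regTerm
    return modError, modLoss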
Example No. 2
 def test_inference(self):
   # Build the inference graph, check that it exposes one weight tensor per
   # layer, then evaluate the network and the weights on a feed of ones.
   nnet, weightTerms = function_fit.inference(self.x, self.hidden1, self.hidden2)
   self.assertEqual(len(weightTerms), 3, msg='expected 3 weight terms')
   init_op = tf.initialize_all_variables()
   with tf.Session() as sess:
     sess.run(init_op)
     nnetEval, w0, w1, w2 = sess.run([nnet, weightTerms[0], weightTerms[1], weightTerms[2]],
                                     feed_dict={self.x: np.ones((3, 1), dtype=np.float32)})
     print("nnet=%r\n w0=%r\n w1=%r\n w2=%r" % (nnetEval, w0, w1, w2))
Example No. 3
def mainAfterWithGraph(functionData, mod):
    # Generate placeholders for the input and labels
    mod.x_input = tf.placeholder(tf.float32, shape=(None, 1))
    mod.y_output = tf.placeholder(tf.float32, shape=(None, 1))

    # Build a Graph that computes predictions from the inference model.
    mod = ffit.inference(mod, mod.x_input, FLAGS.LayerNodes,
                   FLAGS.actfn, FLAGS.regNorm)

    # Add to the Graph the Ops for loss calculation.
    mod.modelError, mod.loss = ffit.loss(mod.nnetModel, mod.W_regterms, FLAGS.reg,
                                         mod.y_output)

    # Add to the Graph the Ops that calculate and apply gradients.
    mod = ffit.training(mod, mod.loss, FLAGS.learning_rate)
    
    # Build the summary operation based on the TF collection of Summaries.
#    mod.train_summary_op = tf.merge_all_summaries()

    # Create a saver for writing training checkpoints.
#    saver = tf.train.Saver()

    # Create a session for running Ops on the Graph.
    sess = tf.Session(config=tf.ConfigProto(intra_op_parallelism_threads=4))

    # Run the Op to initialize the variables.
    init = tf.initialize_all_variables()

    sess.run(init)

    # Instantiate a SummaryWriter to output summaries and the Graph.
#    summary_writer = tf.train.SummaryWriter(FLAGS.train_dir,
#                                            graph_def=sess.graph_def)

    h5out = tfh5.TensorFlowH5Writer(FLAGS.summaryfile)
    FLAGS.h5write(h5out.h5)
    functionData.h5write(h5out.h5)

    train_feed_dict = {mod.x_input: functionData.x_train,
                       mod.y_output:functionData.y_train}

    test_feed_dict = {mod.x_input: functionData.x_test,
                      mod.y_output:functionData.y_test}

    all_feed_dict = {mod.x_input: functionData.x_all,
                     mod.y_output:functionData.y_all}

    # And then after everything is built, start the training loop.
#    saver.restore(sess, FLAGS.train_dir)

    mod.grad = None
    mod.gradMag = None

    for step in xrange(FLAGS.max_steps):
        trainStep(step, mod, sess, h5out, train_feed_dict, test_feed_dict, all_feed_dict)
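The trainStep helper called in the loop is not part of this listing; a minimal sketch of what such a per-step routine might do is given below, assuming ffit.training() attached the optimizer op as mod.train_op and that the held-out loss is reported every few hundred steps. Writing to h5out is omitted here because the TensorFlowH5Writer API is not shown.

def trainStep(step, mod, sess, h5out, train_feed_dict, test_feed_dict, all_feed_dict):
    # One gradient update on the training data.
    _, trainLoss = sess.run([mod.train_op, mod.loss], feed_dict=train_feed_dict)

    # Periodically evaluate on held-out data and report progress.
    if step % 100 == 0:
        testLoss = sess.run(mod.loss, feed_dict=test_feed_dict)
        print("step=%6d train_loss=%.6f test_loss=%.6f" % (step, trainLoss, testLoss))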