# Example #1
def get_optimizer(learning_rate, hparams):
    """Build the tf.train.Optimizer selected by ``hparams.optimizer``.

    Args:
      learning_rate: The learning_rate tensor.
      hparams: tf.contrib.training.HParams object with the optimizer and
          momentum values.

    Returns:
      optimizer: The tf.train.Optimizer matching the optimizer string.

    Raises:
      ValueError: If ``hparams.optimizer`` is not a recognized name.
    """
    # NOTE: TF1 optimizers live in the tf.train namespace; the bare names
    # (tf.RMSPropOptimizer etc.) do not exist. Factories (lambdas) are used
    # so only the requested optimizer is actually constructed.
    factories = {
        "rmsprop": lambda: tf.train.RMSPropOptimizer(
            learning_rate, decay=0.95, momentum=hparams.momentum,
            epsilon=1e-4),
        "adam": lambda: tf.train.AdamOptimizer(
            learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-8),
        "adagrad": lambda: tf.train.AdagradOptimizer(
            learning_rate, initial_accumulator_value=1.0),
        "mom": lambda: tf.train.MomentumOptimizer(
            learning_rate, momentum=hparams.momentum),
        "sgd": lambda: tf.train.GradientDescentOptimizer(learning_rate),
    }
    try:
        return factories[hparams.optimizer]()
    except KeyError:
        # Fail loudly instead of returning None, which would only surface
        # later as an opaque AttributeError at .minimize() time.
        raise ValueError("Unknown optimizer: %s" % hparams.optimizer)
# Example #2
def createGraph(a, b, lr, optimiser):
    """Build and run a small TF1 graph minimizing a Rosenbrock-style cost.

    cost = (a - w1)^2 + b * (w2 - w1^2)^2

    Args:
      a: Scalar constant for the first term.
      b: Scalar multiplier for the second term.
      lr: Learning rate for the chosen optimizer.
      optimiser: One of 'gd', 'gdm', or 'adam'.

    Raises:
      ValueError: If ``optimiser`` is not a recognized name.
    """
    tf.reset_default_graph()
    w1 = tf.get_variable(dtype=tf.float32, shape=(), name="w1",
                         initializer=tf.random_normal_initializer(0.0, 1.0))
    w2 = tf.get_variable(dtype=tf.float32, shape=(), name="w2",
                         initializer=tf.random_normal_initializer(0.0, 1.0))
    a = tf.constant(a, name="a")
    b = tf.constant(b, name="b")
    # Build the cost in named pieces instead of one backslash-continued
    # expression; op names are unchanged.
    term1_sq = tf.square(tf.subtract(a, w1, name='term1'), name='term1_sq')
    term2 = tf.subtract(w2, tf.square(w1, name='weight1_sq'), name='term2')
    term2_sq = tf.square(term2, name='term2_sq')
    costFunc = tf.add(term1_sq,
                      tf.multiply(b, term2_sq, name='term2_Sq_Mul_b'),
                      name='costFunc')
    print(a, b)
    print(lr)
    print(optimiser)
    # NOTE: the Momentum/Adam optimizers live under tf.train (the original
    # used the non-existent tf.MomentumOptimizer / tf.AdamOptimizer), and an
    # unknown string must not fall through to an undefined train_step.
    if optimiser == 'gd':
        train_step = tf.train.GradientDescentOptimizer(
            learning_rate=lr, name='GradientDescent').minimize(
                costFunc, name='train_step')
    elif optimiser == 'gdm':
        train_step = tf.train.MomentumOptimizer(
            learning_rate=lr, momentum=0.9, name='Momentum').minimize(
                costFunc, name='train_step')
    elif optimiser == 'adam':
        train_step = tf.train.AdamOptimizer(
            learning_rate=lr, name='Adam').minimize(
                costFunc, name='train_step')
    else:
        raise ValueError("Unknown optimiser: %s" % optimiser)

    init = tf.global_variables_initializer()
    # Once the graph is built, write it out for TensorBoard.
    file_writer = tf.summary.FileWriter(
        "./datasets/myTensorboardLogs/HW1_Oct11/", tf.get_default_graph())

    # Session is closed even if a run fails.
    with tf.Session() as sess:
        sess.run(fetches=[init])
        for step in range(10):
            fetched = sess.run(fetches=[train_step, w1, w2])
            # The fetched values are the weights, not an accuracy metric;
            # the original label "Accuracy" was misleading.
            print('w1, w2 at step %s: %s' % (step, fetched[1:]))
    file_writer.close()
# Example #3
# File: main.py  Project: yesyu/Pig2
import model
import numpy as np
import tensorflow as tf

IMAGE_SIZE = 32
IMAGE_CHANNELS = 3

# Input placeholders. NOTE(review): the label placeholder is given an image
# shape here; that matches the original code, but softmax_cross_entropy
# usually expects (batch, num_classes) — confirm against model.resnet.
image = tf.placeholder(tf.float32,
                       [None, IMAGE_SIZE, IMAGE_SIZE, IMAGE_CHANNELS])
label = tf.placeholder(tf.float32,
                       [None, IMAGE_SIZE, IMAGE_SIZE, IMAGE_CHANNELS])  # fixed typo: IMAGE_CHNNELS
learning_rate = tf.placeholder(tf.float32, [])
net = model.resnet(image, 1)
cross_entropy = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=label, logits=net))
# TF1 optimizers live under tf.train; bare tf.MomentumOptimizer does not exist.
opt = tf.train.MomentumOptimizer(learning_rate, 0.9)
train_op = opt.minimize(cross_entropy)
correct_pre = tf.equal(tf.argmax(net, 1), tf.argmax(label, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pre, dtype=tf.float32))
saver = tf.train.Saver()  # fixed: tf.train.saver() is not a real API

sess = tf.Session()
sess.run(tf.global_variables_initializer())
checkpoint = tf.train.latest_checkpoint('.')
if checkpoint is not None:
    print('Restoring from checkpoint ' + checkpoint)
    saver.restore(sess, checkpoint)  # fixed typo: ckeckpoint -> checkpoint
else:
    print('Could not find the checkpoint to restore')

sess.close()