# Example 1
    # if len(past_losses) == loss_history and np.std(past_losses) <= 0.1:
    #     rate = rate * 10.0
    return max(rate, min_rate)


# Example 1: train in 'camera' mode with an adaptive, loss-driven learning rate.
last_loss = None          # most recent minibatch cost; seeds the LR schedule
saver = tf.train.Saver()
save_step = 10            # intended checkpoint interval; NOTE(review): no saver.save call visible in this excerpt
past_losses = []          # history buffer for the (commented-out) plateau heuristic
loss_history = 5          # window size for that heuristic
with tf.Session() as sess:
    print 'initializing'
    sess.run(init)
    print 'session launched'
    for step in range(training_iters):
        features, targets = batch(batch_size, mode='camera')
        # One backprop step with dropout enabled and an LR derived from the
        # last observed loss.
        # NOTE(review): get_learning_rate is called with two arguments here,
        # but the one-argument definition elsewhere in this file would reject
        # this call — confirm which version of the function this script uses.
        sess.run(optimizer,
                 feed_dict={
                     x: features,
                     y: targets,
                     keep_prob: dropout,
                     learning_rate: get_learning_rate(last_loss, past_losses)
                 })
        if step % display_step == 0:
            # Re-evaluate cost on the same minibatch with dropout disabled.
            loss = sess.run(cost,
                            feed_dict={
                                x: features,
                                y: targets,
                                keep_prob: 1.
                            })
            last_loss = loss
# Example 2
# Example 2 ('basic'): train on default batches, report RMS minibatch error.
last_loss = None
saver = tf.train.Saver()
save_step = 100           # intended checkpoint interval; NOTE(review): no saver.save call visible in this excerpt
past_losses = []
loss_history = 5
lowest_loss = 1000.0      # best-so-far tracking vars; updates not visible in this excerpt
lowest_iter = 0

loss_history = []         # NOTE(review): rebinds loss_history from the int 5 above to a list
name = 'basic'            # run label, presumably used for checkpoint naming — verify
with tf.Session() as sess:
    print 'initializing'
    sess.run(init)
    print 'session launched'
    for step in range(training_iters):
        features, targets = batch(batch_size)
        # One optimization step with dropout enabled.
        sess.run(optimizer,
                 feed_dict={
                     x: features,
                     y: targets,
                     keep_prob: dropout
                 })
        if step % display_step == 0:
            # Re-evaluate cost on the same minibatch with dropout disabled.
            loss = sess.run(cost,
                            feed_dict={
                                x: features,
                                y: targets,
                                keep_prob: 1.
                            })
            # Undo the ackermann_scale normalization and report RMS error.
            print("Iter {}, Minibatch avg error={}".format(
                step, np.sqrt(loss / (ackermann_scale**2.0))))
# Example 3
# Example 3 ('next_three'): same loop as Example 2, but batches predict the
# next three targets (next_x=3).
last_loss = None
saver = tf.train.Saver()
save_step = 100           # intended checkpoint interval; NOTE(review): no saver.save call visible in this excerpt
past_losses = []
loss_history = 5
lowest_loss = 1000.0      # best-so-far tracking vars; updates not visible in this excerpt
lowest_iter = 0

loss_history = []         # NOTE(review): rebinds loss_history from the int 5 above to a list
name = 'next_three'       # run label, presumably used for checkpoint naming — verify
with tf.Session() as sess:
    print 'initializing'
    sess.run(init)
    print 'session launched'
    for step in range(training_iters):
        features, targets = batch(batch_size, next_x=3)
        # One optimization step with dropout enabled.
        sess.run(optimizer,
                 feed_dict={
                     x: features,
                     y: targets,
                     keep_prob: dropout
                 })
        if step % display_step == 0:
            # Re-evaluate cost on the same minibatch with dropout disabled.
            loss = sess.run(cost,
                            feed_dict={
                                x: features,
                                y: targets,
                                keep_prob: 1.
                            })
            # Undo the ackermann_scale normalization and report RMS error.
            print("Iter {}, Minibatch avg error={}".format(
                step, np.sqrt(loss / (ackermann_scale**2.0))))
# Example 4
# Example 4 ('no_cdf_next_three'): same as Example 3 but with cdf=False
# passed to the batch generator.
last_loss = None
saver = tf.train.Saver()
save_step = 100           # intended checkpoint interval; NOTE(review): no saver.save call visible in this excerpt
past_losses = []
loss_history = 5
lowest_loss = 1000.0      # best-so-far tracking vars; updates not visible in this excerpt
lowest_iter = 0

loss_history = []         # NOTE(review): rebinds loss_history from the int 5 above to a list
name = 'no_cdf_next_three'  # run label, presumably used for checkpoint naming — verify
with tf.Session() as sess:
    print 'initializing'
    sess.run(init)
    print 'session launched'
    for step in range(training_iters):
        features, targets = batch(batch_size, next_x=3, cdf=False)
        # One optimization step with dropout enabled.
        sess.run(optimizer,
                 feed_dict={
                     x: features,
                     y: targets,
                     keep_prob: dropout
                 })
        if step % display_step == 0:
            # Re-evaluate cost on the same minibatch with dropout disabled.
            loss = sess.run(cost,
                            feed_dict={
                                x: features,
                                y: targets,
                                keep_prob: 1.
                            })
            # Undo the ackermann_scale normalization and report RMS error.
            print("Iter {}, Minibatch avg error={}".format(
                step, np.sqrt(loss / (ackermann_scale**2.0))))
# Example 5
    return max(rate, min_rate)


# Example 5: train in 'laser' mode (laser_cdf disabled) with an adaptive
# learning rate; dropout is effectively off (keep_prob fixed at 1.0).
last_loss = None          # most recent minibatch cost; seeds the LR schedule
saver = tf.train.Saver()
save_step = 10            # intended checkpoint interval; NOTE(review): no saver.save call visible in this excerpt
past_losses = []
loss_history = 5
lowest_loss = 1000.0      # best-so-far tracking vars; updates not visible in this excerpt
lowest_iter = 0
with tf.Session() as sess:
    print 'initializing'
    sess.run(init)
    print 'session launched'
    for step in range(training_iters):
        features, targets = batch(batch_size, mode='laser', laser_cdf=False)
        # One backprop step; LR derived from the last observed loss.
        sess.run(optimizer,
                 feed_dict={
                     x: features,
                     y: targets,
                     keep_prob: 1.0,
                     learning_rate: get_learning_rate(last_loss)
                 })
        if step % display_step == 0:
            # Re-evaluate cost on the same minibatch (keep_prob already 1).
            loss = sess.run(cost,
                            feed_dict={
                                x: features,
                                y: targets,
                                keep_prob: 1.
                            })
            last_loss = loss
# Example 6
# Bounds for the loss-proportional learning-rate schedule.
max_rate = 1
min_rate = 0.001


def get_learning_rate(last_loss):
    """Compute a loss-proportional learning rate clamped to [min_rate, max_rate].

    Args:
        last_loss: Most recent training loss, or None before the first
            loss has been observed.

    Returns:
        max_rate when last_loss is None (start as aggressively as allowed);
        otherwise last_loss / 1000.0 clipped into [min_rate, max_rate].
    """
    if last_loss is None:
        # No loss observed yet: use the named constant instead of a magic
        # literal so the ceiling stays in one place.
        return max_rate
    rate = min(last_loss / 1000.0, max_rate)
    return max(rate, min_rate)


# Launch the graph
# Example 6: epoch-based training in 'laser' mode with the adaptive LR.
last_loss = 0  # NOTE(review): never reassigned inside the loop, so the
               # learning rate stays at get_learning_rate(0) for every epoch
               # — confirm whether last_loss = c was intended after each step.
with tf.Session() as sess:
    sess.run(init)

    # Training cycle
    for epoch in range(training_epochs):
        features, targets = batch(batch_size, mode='laser')
        # Run optimization op (backprop) and cost op (to get loss value)
        _, c = sess.run([optimizer, cost],
                        feed_dict={
                            x: features,
                            y: targets,
                            learning_rate: get_learning_rate(last_loss)
                        })
        if epoch % display_step == 0:
            print("Epoch: {}, cost={}".format(epoch, c))
print("Optimization Finished!")
# Example 7
# Example 7: build the prediction graph and training ops, then (below,
# truncated in this excerpt) restore a checkpoint and plot predictions.
pred = conv_net(x, weights, biases, keep_prob)
print 'model constructed'

# Mean squared error between network output and targets.
cost = tf.reduce_mean(tf.squared_difference(pred, y))
learning_rate = 0.001
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
print 'optimizer created'

init = tf.global_variables_initializer()
print 'init created'
 
saver = tf.train.Saver()
with tf.Session() as sess:
    saver.restore(sess, 'models/test.ckpt')

    features, targets = batch(1000000)
    predictions = sess.run(pred, feed_dict={x: features})
    x = [t[0]/(100) for t in targets] # remember the division by 100 for denormalization!
    y = [float(p[0])/100.0 for p in predictions]

    def bound(v):
        v = max(0.0, v)
        return min(1.0, v)

    fig, ax = plt.subplots()
    y = [bound(v) for v in y]
    x = [bound(v) for v in x]
    ax.scatter(x, y)

    lims = [
        0, 1