Example #1
def get(self):
    # Request handler: generate six sample metrics and report the result.
    result = generate_data.gen(6)
    #result = metric_writer.write('https://shining-fire-2617.firebaseio.com/data/cloud-androidwear/testmetric.json?auth=wGLkuGRzoPqkBFvICYIhnI8jj3rUUQ9E1jZxTtoy', m.timestamp, m.value)
    #metric_list = generate_data.gen_multiple_metrics()
    #result = metric_writer.write('https://shining-fire-2617.firebaseio.com/data/cloud-androidwear/', '.json?auth=wGLkuGRzoPqkBFvICYIhnI8jj3rUUQ9E1jZxTtoy', metric_list)
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.write('ScheduledMetric: ' + str(result))

def gen_body(n=3):
    # Build a row-insert request body from n generated metrics.
    metric_list = generate_data.gen(n)
    body = {'rows': []}
    for metric in metric_list:
        item = {'json': {'time': metric['timestamp'], 'value': metric['value']}}
        body['rows'].append(item)
    return body
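From the loop in gen_body, generate_data.gen(n) evidently returns a list of dicts with 'timestamp' and 'value' keys. A minimal usage sketch under that assumption; the stand-in module below is made up purely so the snippet runs on its own:

# Hypothetical usage sketch; this fake generate_data only mimics the
# interface inferred from gen_body (a list of timestamp/value dicts).
import json
import time

class generate_data(object):  # stand-in, not the real module
    @staticmethod
    def gen(n):
        return [{'timestamp': time.time() + i, 'value': float(i)}
                for i in range(n)]

print(json.dumps(gen_body(2), indent=2))
# -> {"rows": [{"json": {"time": ..., "value": 0.0}},
#              {"json": {"time": ..., "value": 1.0}}]}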
Example #3
# Assumed context for this excerpt: net, gen, and the hyperparameters
# (max_seq_len, learning_rate, batch_size, training_iters, FINAL_TENSOR_NAME)
# are defined elsewhere in the source file.
import tensorflow as tf
from tensorflow.python.framework import graph_util
from tensorflow.python.platform import gfile


def main():

    graph = tf.Graph()
    with graph.as_default():

        x = tf.placeholder(tf.float32, [None, max_seq_len, 2],
                           name='x')  # input : position with noise
        y = tf.placeholder(tf.float32, [None, 2],
                           name='y')  # label : ground truth position
        l = tf.placeholder(tf.int32, [None], name='l')  # seq. length

        pred = net(x, l)
        cost = tf.nn.l2_loss(y - pred)
        tf.summary.scalar('cost', cost)
        opt = tf.train.AdamOptimizer(
            learning_rate=learning_rate).minimize(cost)

        init = tf.global_variables_initializer()

    g = gen(max_seq_len=max_seq_len, min_seq_len=max_seq_len, noise=5e-2)

    with tf.Session(graph=graph) as sess:
        sess.run(init)

        merged = tf.summary.merge_all()
        writer = tf.summary.FileWriter('/tmp/localize_logs', sess.graph)

        for step, label, data, lengths in g.get(batch_size, training_iters):
            feed_dict = {x: data, y: label, l: lengths}
            # label = [batch, length, channel]
            summary, _ = sess.run([merged, opt], feed_dict=feed_dict)
            writer.add_summary(summary, step)
            if step % 100 == 0:
                prediction, loss = sess.run([pred, cost], feed_dict=feed_dict)
                print('[Step %02d] loss : %f' % (step, loss))
                print('real :', label[0])
                print('pred :', prediction[0])

        output_graph_def = graph_util.convert_variables_to_constants(
            sess, graph.as_graph_def(), [FINAL_TENSOR_NAME])
        with gfile.FastGFile('output_graph.pb', 'wb') as f:
            f.write(output_graph_def.SerializeToString())
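The gen class itself is not shown in this excerpt; judging by the training loop, g.get(batch_size, training_iters) yields (step, label, data, lengths) tuples with shapes matching the placeholders. A hypothetical stand-in that lets the loop run on random data:

# Hypothetical stand-in for the gen class, assuming only the interface
# used above: get(batch_size, iters) yields (step, label, data, lengths).
import numpy as np

class RandomGen(object):
    def __init__(self, max_seq_len, min_seq_len=1, noise=5e-2):
        self.max_seq_len = max_seq_len
        self.noise = noise

    def get(self, batch_size, iters):
        for step in range(iters):
            # label: [batch, 2] target position; data: [batch, seq, 2] trajectory
            label = np.random.uniform(-1, 1, (batch_size, 2)).astype(np.float32)
            data = np.random.uniform(
                -1, 1, (batch_size, self.max_seq_len, 2)).astype(np.float32)
            lengths = np.full((batch_size,), self.max_seq_len, dtype=np.int32)
            yield step, label, data, lengths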
Example #4
# Assumed context for this excerpt: net, gen, MODEL_SCOPE, and the
# hyperparameters/flags (batch_size, seq_len, learning_rate, training_iters,
# testing_iters, load_ckpt, do_train, do_test, ckpt_path, log_root) are
# defined elsewhere in the source file.
import os

import cv2
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import graph_util
from tensorflow.python.platform import gfile


def main():

    graph = tf.Graph()
    with graph.as_default():

        x = tf.placeholder(tf.float32, [None, 1, 2],
                           name='x')  # input : position with noise
        y = tf.placeholder(tf.float32, [None, 1, 2],
                           name='y')  # label : ground truth position

        with tf.variable_scope(MODEL_SCOPE, reuse=None):
            train = {}
            train['pred'], train['reset'], train['update'] = net(
                x, batch_size, None)

        with tf.variable_scope(MODEL_SCOPE, reuse=True):
            test = {}
            test['pred'], test['reset'], test['update'] = net(x, 1, True)

        cost = tf.nn.l2_loss(y - train['pred'])
        tf.summary.scalar('cost', cost)
        opt = tf.train.AdamOptimizer(
            learning_rate=learning_rate).minimize(cost)

        init = tf.global_variables_initializer()
        for v in tf.trainable_variables():
            print(v.name, v.shape)

    g = gen(seq_len=seq_len, noise=4e-2)

    with tf.Session(graph=graph) as sess:
        saver = tf.train.Saver()

        sess.run(init)

        if load_ckpt:
            saver.restore(sess, ckpt_path)

        ##  TRAIN  ##
        if do_train:

            ## LOGGING ##
            if not os.path.exists(log_root):
                os.makedirs(log_root)
            run_id = len(next(os.walk(log_root))[1])
            writer = tf.summary.FileWriter(
                os.path.join(log_root, 'run_%02d' % run_id), graph)
            merged = tf.summary.merge_all()
            #############

            for step, label, data in g.get(batch_size, training_iters):
                _ = sess.run(train['reset'])
                for x_in, y_in in zip(data, label):
                    feed_dict = {
                        x: x_in[:, np.newaxis, :],
                        y: y_in[:, np.newaxis, :]
                    }
                    # label = [batch, length, channel]
                    summary, _, _ = sess.run([merged, opt, train['update']],
                                             feed_dict=feed_dict)
                    writer.add_summary(summary, step)

                if step % 100 == 0:
                    prediction, loss = sess.run([train['pred'], cost],
                                                feed_dict=feed_dict)
                    print('[Step %02d] loss : %f' % (step, loss))
                    #print('real :' , label[-1])
                    #print('pred :' , prediction[-1])
            saver.save(sess, ckpt_path)
        ############

        ##  SAVE  ##
        output_graph_def = graph_util.convert_variables_to_constants(
            sess, graph.as_graph_def(),
            ['model_1/pred', 'model_1/update', 'model_1/reset'])

        with gfile.FastGFile('output_graph.pb', 'wb') as f:
            f.write(output_graph_def.SerializeToString())
        ############

        ## TESTING ##
        if do_test:
            w, h = 512, 512
            frame = np.zeros((h, w, 3), dtype=np.uint8)

            for step, label, data in g.get(1, testing_iters):
                frame.fill(0)
                _ = sess.run(test['reset'])

                prvx = None
                prvy = None
                prvp = None
                err_m = 0
                err_p = 0

                for x_in, y_in in zip(data, label):
                    feed_dict = {
                        x: x_in[:, np.newaxis, :],
                        y: y_in[:, np.newaxis, :]
                    }
                    pred, _ = sess.run([test['pred'], test['update']],
                                       feed_dict=feed_dict)

                    x_in, y_in, pred = [(np.squeeze(e) * [h / 2, w / 2] +
                                         [h / 2, w / 2]).astype(np.int32)
                                        for e in (x_in, y_in, pred)]

                    y_pt = (y_in[1], y_in[0])
                    x_pt = (x_in[1], x_in[0])
                    p_pt = (pred[1], pred[0])

                    err_m += np.linalg.norm(y_in - x_in)
                    err_p += np.linalg.norm(y_in - pred)

                    cv2.circle(frame, y_pt, 5, (255, 0, 0),
                               thickness=-1)  # --> true pos, blue
                    cv2.circle(frame, x_pt, 4, (0, 255, 0),
                               thickness=1)  # --> measured pos, green
                    cv2.circle(frame, p_pt, 4, (0, 0, 255),
                               thickness=1)  # --> predicted pos, red

                    if prvx is not None:
                        cv2.line(frame, prvy, y_pt, (255, 0, 0), 1)
                        cv2.line(frame, prvx, x_pt, (0, 255, 0), 1)
                        cv2.line(frame, prvp, p_pt, (0, 0, 255), 1)

                    prvy = y_pt
                    prvx = x_pt
                    prvp = p_pt

                    cv2.imshow('frame', frame)
                    if cv2.waitKey(20) == 27:
                        break
                print('err_m : %.2f ; err_p : %.2f' % (err_m, err_p))
                if cv2.waitKey(0) == 27:
                    break
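The frozen output_graph.pb written in the SAVE block can later be loaded without any of the model-building code. A minimal reload sketch for TF 1.x; the tensor name 'model_1/pred:0' follows from the node names passed to convert_variables_to_constants above, with the usual ':0' output suffix:

# Sketch: reload the frozen graph written by the script above (TF 1.x).
import tensorflow as tf

graph_def = tf.GraphDef()
with tf.gfile.GFile('output_graph.pb', 'rb') as f:
    graph_def.ParseFromString(f.read())

with tf.Graph().as_default() as g:
    tf.import_graph_def(graph_def, name='')
    pred = g.get_tensor_by_name('model_1/pred:0')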
Example #5
import numpy as np
from scipy import signal
import mnist
import matplotlib.pyplot as plt
import generate_data

np.random.seed(5)
X_train, y_train, X_test, y_test = generate_data.gen()


def tanh(x):
    return np.tanh(x)


def d_tanh(x):
    # Derivative of tanh: 1 - tanh(x)^2.
    return 1 - np.tanh(x)**2


def log(x):
    # Logistic sigmoid.
    return 1 / (1 + np.exp(-x))


def d_log(x):
    # Derivative of the sigmoid: log(x) * (1 - log(x)).
    return log(x) * (1 - log(x))


def relu(a):
    # Clip negatives; np.maximum avoids mutating the caller's array in place.
    return np.maximum(a, 0.)
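A quick finite-difference check confirms the analytic derivatives above; this sketch assumes only NumPy and the functions just defined:

# Central-difference check of d_tanh and d_log against the analytic forms.
eps = 1e-5
xs = np.linspace(-3, 3, 7)

num_d_tanh = (tanh(xs + eps) - tanh(xs - eps)) / (2 * eps)
num_d_log = (log(xs + eps) - log(xs - eps)) / (2 * eps)

assert np.allclose(num_d_tanh, d_tanh(xs), atol=1e-6)
assert np.allclose(num_d_log, d_log(xs), atol=1e-6)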