Example #1
 def test_train(self):
     network = Network(
         model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
     train_generator = DatasetGenerator(
         '../data/datasets/preprocessed/jacquard_samples.hdf5', 4)
     network.train(train_generator, 4, 2)
     network.train(train_generator, 4, 0)
Example #2
    def test_wider(self):
        layer = 1
        layer2 = 4
        layer3 = 6
        network = Network(
            model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
        network2 = network.wider(layer=layer)
        network3 = network.wider(layer=layer2)
        network4 = network.wider(layer=layer3)
        input_img = np.expand_dims(np.load('depth_inpainted.npy'), axis=2)
        input_img = np.expand_dims(input_img, axis=0)
        output1 = network.predict(input_img)
        output2 = network2.predict(input_img)
        output3 = network3.predict(input_img)
        output4 = network4.predict(input_img)

        self.assertTrue(
            len(network2.model.layers) == len(network.model.layers))
        self.assertTrue(network2.model.layers[layer].filters ==
                        network.model.layers[layer].filters * 2)
        self.assertTrue(network2.model.layers[layer + 1].output.shape ==
                        network.model.layers[layer + 1].output.shape)

        import pylab as plt
        plt.figure()
        plt.subplot(2, 2, 1)
        plt.imshow(output1[0].squeeze())
        plt.subplot(2, 2, 2)
        plt.imshow(output2[0].squeeze())
        plt.subplot(2, 2, 3)
        plt.imshow(output3[0].squeeze())
        plt.subplot(2, 2, 4)
        plt.imshow(output4[0].squeeze())
        plt.show()
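The assertions above rely on a function-preserving "wider" operation (in the spirit of Net2Net): the widened network must keep producing the same predictions. A minimal NumPy sketch of that property for two linear layers (an illustration under my own assumptions, not this library's implementation):

import numpy as np

rng = np.random.default_rng(0)
W1 = rng.normal(size=(3, 2))   # layer to widen: 3 inputs -> 2 units
W2 = rng.normal(size=(2, 1))   # next layer: 2 units -> 1 output

# Duplicate unit 0's incoming weights, then halve both copies' outgoing rows
# so their contributions sum to the original.
W1_wide = np.column_stack([W1, W1[:, 0]])   # now 3 inputs -> 3 units
W2_wide = np.vstack([W2, W2[0:1]])
W2_wide[0] /= 2.0
W2_wide[2] /= 2.0

x = rng.normal(size=(5, 3))
assert np.allclose(x @ W1 @ W2, x @ W1_wide @ W2_wide)  # same function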
Example #3
 def test_get_connectivity(self):
     network = Network(
         model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
     connectivity = network.get_connectivity()
     connectivity2 = network.get_connectivity(layer_idx=3)
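     # Each entry is the index of the corresponding layer's input layer,
     # with -1 marking the model input; the repeated trailing values in the
     # expected lists suggest several output heads fed by the same layer.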
     self.assertTrue(connectivity == [-1, 0, 1, 2, 3, 4, 5, 6, 6, 6, 6])
     self.assertTrue(connectivity2 == [-1, 0, 1, 2, 3, 3, 3, 3])
Example #4
 def test_conv_layer_idxs(self):
     network = Network(
         model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
     layers = network.conv_layer_idxs
     self.assertTrue([1, 2, 3] == layers)
     deeper = network.deeper(3)
     layers2 = deeper.conv_layer_idxs
     self.assertTrue(layers2 == [1, 2, 3, 4])
Example #5
 def test_get_expandable_layer_idxs(self):
     network = Network(
         model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
     layers = network.get_expandable_layer_idxs()
     self.assertTrue([1, 2, 3] == layers)
     deeper = network.deeper(3)
     layers2 = deeper.conv_layer_idxs
     self.assertTrue(layers2 == [1, 2, 3, 4])
     layers = network.get_expandable_layer_idxs(transpose=True)
     self.assertTrue([1, 2, 3, 4, 5, 6] == layers)
Example #6
    def test_layer_sizes(self):
        test_network = Network(cf, interaction_map)

        input_to_layer = tf.sparse_placeholder(
            tf.float32,
            shape=[None, interaction_map.interaction_class_cnt],
            name="interaction_feature")
        out_layer = test_network.predict(input_to_layer)
        out_layer_shape = out_layer.get_shape().as_list()
        self.assertTrue(out_layer_shape == [None, cf.embedding_size],
                        msg="layer shape")
Example #7
def main(args):
    np.random.seed(0xC0FFEE)
    n = Network()
    n.layers.append( Fullconnect(2, 10, ReLu.function, ReLu.derivative) )
    n.layers.append( Fullconnect(10, 2) )

    x = np.array([[1, 2, 1, 2,  5, 6, 5, 6],
                  [5, 4, 4, 5,  1, 2, 2, 1]])
    t = np.array([[1, 1, 1, 1,  0, 0, 0, 0],
                  [0, 0, 0, 0,  1, 1, 1, 1]])

    for epoch in range(0, 20):
        loss = n.train( x, t )

    pkl.dump( n.dump_params().copy(), open(args.dump_params, 'wb') )
    logging.info('pickle dump done')


    nn = Network()
    nn.layers.append( Fullconnect(2, 10, ReLu.function, ReLu.derivative) )
    nn.layers.append( Fullconnect(10, 2) )


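    # NOTE: presumably args.dump_params == 'test.pkl', so the load below
    # reads back the parameters dumped above.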
    nn.load_params( pkl.load( open('test.pkl', 'rb') ).copy() )
    logging.info('pickle load done')

    print 'before:', [['%.2f'%_ for _ in v] for v in n.predict( x )]
    print 'after: ', [['%.2f'%_ for _ in v] for v in nn.predict( x )]
Example #8
 def test_reconnect_model(self):
     network = Network(
         model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
     layer = keras.layers.Conv2D(8,
                                 kernel_size=(3, 3),
                                 strides=(2, 2),
                                 padding='same',
                                 activation='relu',
                                 name='test_layer')
     layer2 = keras.layers.Conv2D(8,
                                  kernel_size=(3, 3),
                                  strides=(2, 2),
                                  padding='same',
                                  activation='relu',
                                  name='test_layer')
     layer3 = keras.layers.Conv2D(32,
                                  kernel_size=(3, 3),
                                  strides=(2, 2),
                                  padding='same',
                                  activation='relu',
                                  name='test_layer')
     network1 = Network(model=network.reconnect_model(3, [layer]))
     self.assertTrue(
         str(network1) ==
         'C9x9x32_C5x5x16_C3x3x8_C3x3x8_T3x3x8_T5x5x16_T9x9x32')
     network2 = Network(
         model=network.reconnect_model(3, [layer2], replace=True))
     self.assertTrue(
         str(network2) == 'C9x9x32_C5x5x16_C3x3x8_T3x3x8_T5x5x16_T9x9x32')
     network3 = Network(model=network.reconnect_model(6, [layer3]))
     self.assertTrue(
         str(network3) ==
         'C9x9x32_C5x5x16_C3x3x8_T3x3x8_T5x5x16_T9x9x32_C3x3x32')
Example #9
def main(args):
    np.random.seed(0xC0FFEE)

    train, test, dicts = pkl.load( open('datas/atis.pkl', 'r') )
    index2words = {value:key for key, value in dicts['words2idx'].iteritems()}
    index2tables = {value:key for key, value in dicts['tables2idx'].iteritems()}
    index2labels = {value:key for key, value in dicts['labels2idx'].iteritems()}

    train_lex, train_ne, train_y = train
    test_lex, test_ne, test_y = test
    vocsize = len(dicts['words2idx']) + 1
    nclasses = len(dicts['labels2idx'])
    nsentences = len(train_lex)

    context_window_size = 7

    learning_rate = 0.01
    n = Network()
    n.layers.append( Fullconnect(vocsize, 100, Tanh.function, Tanh.derivative, updater=GradientDescent(learning_rate)) )
    n.layers.append( BiRecurrent(100, 100, Tanh.function, Tanh.derivative, updater=GradientDescent(learning_rate)) )
    n.layers.append( Fullconnect(100, nclasses, updater=GradientDescent(learning_rate)) )
    n.activation = Softmax()

    for epoch in range(0, 11):
        epoch_loss = 0
        for i in xrange(nsentences):
            cwords = contextwin(train_lex[i], context_window_size)
            words, labels = onehotvector(cwords, vocsize, train_y[i], nclasses)

            loss = n.train( words, labels ) / len(words) # sequence normalized loss
            epoch_loss += loss
            if i%1000 == 0:
                logging.info( 'epoch:%04d iter:%04d loss:%.2f'%(epoch, i, epoch_loss/(i+1)) )

        logging.info( 'epoch:%04d loss:%.2f'%(epoch, epoch_loss/nsentences) )

        for i in range(20):
            idx = random.randint(0, len(test_lex)-1)
            cwords = contextwin(test_lex[idx], context_window_size)
            words = onehotvector(cwords, vocsize)[0]
            labels = test_y[idx]
            _ = n.predict(words)
            y = [np.argmax(prediction) for prediction in _]
            #print _
            #print y

            print 'word:   ', ' '.join([index2words[_] for _ in test_lex[idx]])
            print 'label:  ', ' '.join([index2labels[_] for _ in labels])
            print 'predict:', ' '.join([index2labels[_] for _ in y])
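contextwin and onehotvector are helpers from the surrounding repo and are not shown here. For reference, a common definition of contextwin (e.g. the one in the Theano ATIS tutorial; this repo's version may differ) pads the sequence and slides a window of odd width over it:

def contextwin(sequence, win):
    # win must be odd so each word sits at the center of its window
    assert win % 2 == 1
    seq = list(sequence)
    padded = (win // 2) * [-1] + seq + (win // 2) * [-1]
    return [padded[i:i + win] for i in range(len(seq))]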
Example #10
def main():
    """A simple test to calculate the round trip time of
    packets

    This test was performed on: 3/21/2018
    Results: Overwhelmingly successful. See a fuller description
    on the log page:
    (https://classes.engineering.wustl.edu/ese205/core/index.php?title=Pi_Car_Comm_Log#Timing_Test_.28Finally.29)

    :return: None
    """
    network = Network(1024, 10)
    while True:
        message = "<token>Token data</token>"  # send token data every 2 seconds
        network.broadcast(message)
        time.sleep(2)
Example #11
def init(data, names):
    # load data.xyz as appropriate
    data.game = GameState(names)
    data.controller = Controller()
    data.network = Network()

    data.game_list = [data.game]
Example #12
def main():
    # Open a serial port to give the Arduino commands
    ser = serial.Serial(
        port="/dev/ttyACM0",
        baudrate=9600,
    )
    start = datetime.datetime.now()
    # Create a network that can read in packets of length 1024 bytes or less
    # and that stores the last 10 messages it received.
    network = Network(1024, 10)
    # Start listening for messages (UDP packets).
    network.start_listening(socket.SOCK_DGRAM)
    try:
        while True:
            elapsed = datetime.datetime.now() - start  # 'time' would shadow the time module
            incoming_message = network.read(network.buffer_size)
            for message in incoming_message:
                try:
                    print("Message from: " + message.find_values("f"))
                    print("Steering: " + message.find_values("command").split(".")[1])
                    print("Throttle: " + message.find_values("command").split(".")[0])
                    print("Time since startup: " + str(time))
                except Exception:
                    pass
            """incoming_command = network.read(network.buffer_size)
            if len(incoming_command) > 0:
                incoming_command = incoming_command[0].find_values("command")
                if len(incoming_command) > 0:
                    tmp = incoming_command[0].split(".")
                    esc = tmp[0].strip()
                    steer = tmp[1].strip()
                    ser.write(("e" + esc + "s" + steer).encode('utf-8'))
                    time.sleep(0.01)"""
    except KeyboardInterrupt:
        network.stop_listening()
Example #13
 def test_evaluate_loss(self):
     node = Network(
         model_fn='../data/networks/ggcnn_rss/epoch_29_model.hdf5')
     op = NetworkOptimization(
         eval_method='loss',
         dataset_fn='../data/datasets/preprocessed/jacquard_samples.hdf5',
         epochs=1,
         debug=True)
     op.evaluate(node)
Example #14
def main(args):
    logging.info("load data start")
    train_lex, train_y = pkl.load(open("datas/kowiki_spacing_train.pkl", "r"))
    words2idx = pkl.load(open("datas/kowiki_dict.pkl", "r"))
    logging.info("load data done")

    index2words = {value: key for key, value in words2idx.iteritems()}

    vocsize = len(words2idx) + 1
    nclasses = 2
    nsentences = len(train_lex)
    max_iter = min(args.samples, nsentences)
    logging.info(
        "vocsize:%d, nclasses:%d, nsentences:%d, samples:%d, max_iter:%d"
        % (vocsize, nclasses, nsentences, args.samples, max_iter)
    )

    context_window_size = args.window_size

    n = Network()
    n.layers.append(Fullconnect(vocsize, 256, Tanh.function))
    n.layers.append(Recurrent(256, 256, ReLU.function))
    n.layers.append(Fullconnect(256, 256, ReLU.function))
    n.layers.append(Fullconnect(256, nclasses))
    n.activation = Softmax(is_zero_pad=True)

    if not os.path.isfile(args.params):
        logging.error("not exist parameter file: %s" % args.params)
        return

    n.load_params(pkl.load(open(args.params, "rb")))

    for i in xrange(max_iter):
        cwords = contextwin(train_lex[i], context_window_size)
        words, labels = onehotvector(cwords, vocsize)

        y_list = [np.argmax(_) for _ in n.predict(words)]

        result_list = []
        for idx, y in zip(train_lex[i], y_list):
            if y == 1:
                result_list.append(" ")
            result_list.append(index2words[idx].encode("utf8"))
        print "".join(result_list)
Example #15
def main():
    """A receiver module for the travel_time test. This module will
    receive packets and compare their timestamp to the current
    time to see how long they spent in transit. It will write these
    data to an output file for viewing.
    data to an output file for viewing.

    :return: None
    """
    network = Network(1024, 10)
    network.start_listening(socket.SOCK_DGRAM)
    out_file = open("timing_output.log", "w")
    start = datetime.datetime.now()
    while (datetime.datetime.now() - start) < datetime.timedelta(seconds=30):
        try:
            unreads = network.read(network.buffer_size)                         # read out all unread messages
            for unread in unreads:
                # noinspection PyBroadException
                try:
                    str_time_sent = unread.find_values("s")[0].split("_")       # read in the timestamp
                    time_sent = datetime.datetime(
                        year        =   int(str_time_sent[0]),
                        month       =   int(str_time_sent[1]),
                        day         =   int(str_time_sent[2]),
                        hour        =   int(str_time_sent[3]),
                        minute      =   int(str_time_sent[4]),
                        second      =   int(str_time_sent[5]),
                        microsecond =   int(str_time_sent[6])
                    )
                    current_time = datetime.datetime.now()
                    delta_time = (current_time - time_sent).total_seconds()     # compare timestamp with current time
                    out_file.write(                                             # log the incoming message to a file
                        "Received message: "
                        + unread.content
                        + "At time: "
                        + str(current_time)
                        + ". The time of flight was: "
                        + str(delta_time)
                        + " seconds"
                        + "\n"
                    )
                except Exception:                                               # Ignore corrupted messages
                    continue
        except KeyboardInterrupt:               # Allow the user to stop execution with the keyboard.
            break                               # the teardown below closes the network and file
    network.stop_listening()
    out_file.close()
Example #16
 def test_run_short(self):
     node = Network(
         model_fn='../data/networks/ggcnn_rss/epoch_29_model.hdf5')
     op = NetworkOptimization(
         eval_method='iou',
         dataset_fn='../data/datasets/preprocessed/jacquard_samples.hdf5',
         epochs=0,
         debug=True)
     [nodes, scores, actions] = op.run(node)
     print 'Nodes: {}'.format(nodes)
     print 'Scores: {}'.format(scores)
     print 'Actions: {}'.format(actions)
Example #17
 def test_run_short_transpose(self):
     node = Network(model_fn='../data/networks/shallow/epoch_50_model.hdf5')
     op = NetworkOptimization(
         eval_method='iou',
         dataset_fn='../data/datasets/preprocessed/jacquard_samples.hdf5',
         epochs=0,
         debug=True,
         expand_transpose=True)
     [nodes, scores, actions] = op.run(node, depth=2, k=1)
     print 'Nodes: {}'.format(nodes)
     print 'Scores: {}'.format(scores)
     print 'Actions: {}'.format(actions)
Example #18
 def test_expand(self):
     node = Network(
         model_fn='../data/networks/ggcnn_rss/epoch_29_model.hdf5')
     op = NetworkOptimization(
         eval_method='iou',
         dataset_fn='../data/datasets/preprocessed/jacquard_samples.hdf5')
     children, scores, actions = op.expand(node)
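     # One 'deeper' and one 'wider' child per conv layer: 3 layers x 2 = 6.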
     self.assertTrue(len(children) == 6)
     self.assertTrue((actions == [
         'deeper_conv_1', 'wider_conv_1', 'deeper_conv_2', 'wider_conv_2',
         'deeper_conv_3', 'wider_conv_3'
     ]))
Example #19
 def test_evaluate(self):
     node = Network(
         model_fn='../data/networks/ggcnn_rss/epoch_29_model.hdf5')
     op = NetworkOptimization(
         eval_method='iou',
         dataset_fn='../data/datasets/preprocessed/jacquard_samples.hdf5')
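     # min_iou is presumably the IoU threshold for counting a prediction
     # as correct: at 0 everything passes (score > 0), at 1 nothing does.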
     op.min_iou = 0.
     ev1 = op.evaluate(node)
     op.min_iou = 1.
     ev2 = op.evaluate(node)
     self.assertTrue(ev1 > 0)
     self.assertTrue(ev2 == 0)
Example #20
    def test_wider_on_smaller_network(self):
        x = np.reshape(np.array([[7, 8, 9], [5, 6, 4], [4, 1, 3]]),
                       (1, 3, 3, 1)).astype(np.float64)
        n_filters = 2
        i = keras.layers.Input((3, 3, 1))
        l1 = keras.layers.Conv2D(n_filters,
                                 padding='same',
                                 kernel_size=(3, 3),
                                 activation='relu')(i)
        l2 = keras.layers.Conv2D(n_filters * 2,
                                 padding='same',
                                 kernel_size=(3, 3),
                                 activation='relu')(l1)
        l3 = keras.layers.Conv2DTranspose(n_filters,
                                          padding='same',
                                          kernel_size=(3, 3),
                                          activation='relu')(l2)

        model = keras.models.Model(i, l3)

        network = Network(model=model)
        network2 = network.wider(1)

        model2 = network2.copy_model()

        # Hidden outputs
        modelh1 = keras.models.Model(model.layers[0].output,
                                     model.layers[2].output)
        modelh2 = keras.models.Model(model2.layers[0].output,
                                     model2.layers[2].output)

        hidden1 = modelh1.predict(x)
        hidden2 = modelh2.predict(x)

        self.assertTrue((np.round(hidden1, 3) == np.round(hidden2, 3)).all())

        final1 = model.predict(x)
        final2 = model2.predict(x)

        self.assertTrue((np.round(final1, 3) == np.round(final2, 3)).all())
Example #21
    def test_wider_at_hidden(self):
        layer = 2
        network = Network(
            model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
        network2 = network.wider(layer=layer)
        input_img = np.expand_dims(np.load('depth_inpainted.npy'), axis=2)
        input_img = np.expand_dims(input_img, axis=0)
        input_img = np.ones(input_img.shape)

        m1 = network.model
        m2 = network2.model
        hidden1 = m1.layers[layer + 1](m1.layers[layer].output)
        hidden2 = m2.layers[layer + 1](m2.layers[layer].output)

        mh1 = keras.models.Model(m1.layers[0].output, hidden1)
        mh2 = keras.models.Model(m2.layers[0].output, hidden2)

        o1 = mh1.predict(input_img)
        o2 = mh2.predict(input_img)

        for i in range(o1.shape[3]):
            print i, np.amax(np.abs(o1[:, :, :, i] - o2[:, :, :, i]))
Example #22
    def test__initialize_params_(self):
        preheated_embeddings = np.array([
            np.array([-1.0, -1.0, -6.0]),
            np.array([-1.0, -1.0, 5.0]),
            np.array([-1.0, -1.0, 4.0]),
            np.array([-1.0, 3.0, -1.0]),
            np.array([1.0, 1.0, -1.0]),
            np.array([2.0, 1.0, -1.0]),
            np.array([1.0, -1.0, 1.0]),
            np.array([1.0, -1.0, -1.0])
        ])

        interaction_sparse_tensor = interaction_map.idxs_to_tf([2])

        test_network = Network(cf, interaction_map, preheated_embeddings)

        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            result = sess.run(
                test_network.embedd_interaction_sparse_tensor(
                    interaction_sparse_tensor))

        np.testing.assert_array_equal(result, [[-1.0, -1.0, 4.0]])
Example #23
    def test_deeper(self):
        network = Network(
            model_fn='../ggcnn/data/networks/ggcnn_rss/epoch_29_model.hdf5')
        network2 = network.deeper(layer=2)
        network3 = network.deeper(layer=5)
        network4 = network.deeper(layer=6)
        input_img = np.expand_dims(np.load('depth_inpainted.npy'), axis=2)
        input_img = np.expand_dims(input_img, axis=0)
        output1 = network.predict(input_img)
        output2 = network2.predict(input_img)
        output3 = network3.predict(input_img)
        output4 = network4.predict(input_img)

        self.assertTrue(
            str(network2) ==
            'C9x9x32_C5x5x16_C5x5x16_C3x3x8_T3x3x8_T5x5x16_T9x9x32')
        self.assertTrue(
            str(network3) ==
            'C9x9x32_C5x5x16_C3x3x8_T3x3x8_T5x5x16_T5x5x16_T9x9x32')
        self.assertTrue(
            str(network4) ==
            'C9x9x32_C5x5x16_C3x3x8_T3x3x8_T5x5x16_T9x9x32_T9x9x32')
        for o in [output2, output3, output4]:
            self.assertTrue((o[0] == output1[0]).all())
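Like "wider" above, "deeper" is expected to preserve the network function. A toy NumPy sketch of the usual identity-initialization argument (my own illustration, not this library's code): the inserted layer starts as an identity map, and ReLU is a no-op on the already non-negative activations it receives.

import numpy as np

def relu(z):
    return np.maximum(z, 0.0)

rng = np.random.default_rng(1)
W1 = rng.normal(size=(4, 3))
W2 = rng.normal(size=(3, 2))

x = rng.normal(size=(6, 4))
h = relu(x @ W1)                  # non-negative hidden activations
before = h @ W2

W_new = np.eye(3)                 # inserted layer: identity weights, zero bias
after = relu(h @ W_new) @ W2      # relu(h) == h, so the output is unchanged
assert np.allclose(before, after)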
Example #24
def main(args):
    np.random.seed(0xC0FFEE)
    n = Network()
    n.layers.append(Fullconnect(2, 10, ReLu.function, ReLu.derivative))
    n.layers.append(Fullconnect(10, 2))

    x = np.array([[1, 2, 1, 2, 5, 6, 5, 6], [5, 4, 4, 5, 1, 2, 2, 1]])
    t = np.array([[1, 1, 1, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 1, 1, 1]])

    for epoch in range(0, 20):
        loss = n.train(x, t)

    pkl.dump(n.dump_params().copy(), open(args.dump_params, 'wb'))
    logging.info('pickle dump done')

    nn = Network()
    nn.layers.append(Fullconnect(2, 10, ReLu.function, ReLu.derivative))
    nn.layers.append(Fullconnect(10, 2))

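    # NOTE: presumably args.dump_params == 'test.pkl', so the load below
    # reads back the parameters dumped above.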
    nn.load_params(pkl.load(open('test.pkl', 'rb')).copy())
    logging.info('pickle load done')

    print 'before:', [['%.2f' % _ for _ in v] for v in n.predict(x)]
    print 'after: ', [['%.2f' % _ for _ in v] for v in nn.predict(x)]
Example #25
    def test_layer_sizes(self):
        cf = Config()
        cf.n_chars = 33
        cf.string_length = 199
        cf.n_syllables = 11
        cf.syllable_length = 3
        cf.word_length = 2
        cf.n_words = 7
        cf.output_number = 101
        cf.n_classes = 11
        cf.strides1 = 1
        cf.strides2 = 1

        test_network = Network(cf)

        # layer 0 --------------------------
        input_to_layer = tf.placeholder(tf.float32, [None, 199, 33])
        out_layer = test_network.layer0(input_to_layer)
        out_layer_shape = out_layer.get_shape().as_list()
        self.assertTrue(out_layer_shape == [None, 199, 33, 1],
                        msg="layer 0 shape comp")

        # layer 1 --------------------------
        input_to_layer = tf.placeholder(tf.float32, [None, 199, 33, 1])
        out_layer = test_network.layer1(input_to_layer)
        out_layer_shape = out_layer.get_shape().as_list()
        self.assertTrue(out_layer_shape == [None, 197, 1, 11],
                        msg="layer 1 shape comp")

        # layer 2 --------------------------
        input_to_layer = tf.placeholder(tf.float32, [None, 197, 1, 11])
        out_layer = test_network.layer2(input_to_layer)
        out_layer_shape = out_layer.get_shape().as_list()
        self.assertTrue(out_layer_shape == [None, 196, 1, 7],
                        msg="layer 2 shape comp")

        # layer 3 --------------------------
        input_to_layer = tf.placeholder(tf.float32, [None, 196, 1, 7])
        out_layer = test_network.layer3(input_to_layer)
        out_layer_shape = out_layer.get_shape().as_list()
        self.assertTrue(out_layer_shape == [None, 101],
                        msg="layer 3 shape comp")

        # layer 4 --------------------------
        input_to_layer = tf.placeholder(tf.float32, [None, 101])
        out_layer = test_network.layer4(input_to_layer)
        out_layer_shape = out_layer.get_shape().as_list()
        self.assertTrue(out_layer_shape == [None, 11],
                        msg="layer 4 shape comp")
Example #26
def main(args):
    logging.info('load data start')
    train_lex, train_y = pkl.load(open('datas/kowiki_spacing_train.pkl', 'r'))
    words2idx = pkl.load(open('datas/kowiki_dict.pkl', 'r'))
    logging.info('load data done')

    index2words = {value: key for key, value in words2idx.iteritems()}

    vocsize = len(words2idx) + 1
    nclasses = 2
    nsentences = len(train_lex)
    max_iter = min(args.samples, nsentences)
    logging.info(
        'vocsize:%d, nclasses:%d, nsentences:%d, samples:%d, max_iter:%d' %
        (vocsize, nclasses, nsentences, args.samples, max_iter))

    context_window_size = args.window_size

    n = Network()
    n.layers.append(Fullconnect(vocsize, 256, Tanh.function))
    n.layers.append(Recurrent(256, 256, ReLU.function))
    n.layers.append(Fullconnect(256, 256, ReLU.function))
    n.layers.append(Fullconnect(256, nclasses))
    n.activation = Softmax(is_zero_pad=True)

    if not os.path.isfile(args.params):
        logging.error('parameter file does not exist: %s' % args.params)
        return

    n.load_params(pkl.load(open(args.params, 'rb')))

    for i in xrange(max_iter):
        cwords = contextwin(train_lex[i], context_window_size)
        words, labels = onehotvector(cwords, vocsize)

        y_list = [np.argmax(_) for _ in n.predict(words)]

        result_list = []
        for idx, y in zip(train_lex[i], y_list):
            if y == 1:
                result_list.append(' ')
            result_list.append(index2words[idx].encode('utf8'))
        print ''.join(result_list)
Example #27
def main():
    # Create a serial port with which to communicate with an Arduino
    ser = serial.Serial(
        port='/dev/ttyACM0',
        baudrate=9600,
    )
    # Create a `Network` that can read in packets that are less than
    # or equal to 1024 bytes, and that stores the last 10 messages
    # it received.
    network = Network(1024, 10)
    try:
        while True:
            # noinspection PyBroadException
            try:
                # Decode the info coming from the Arduino
                command = ser.readline().decode('utf-8')
                # Broadcast the command to any Raspberry Pis that are
                # listening.
                network.broadcast("<command>" + command + "</command>")
            except Exception:
                pass
    except KeyboardInterrupt:
        network.close_broadcast()
Example #28
class VirtualMachinesPool(object):
    pool = list()
    using = list()
    network = Network()
    lock = Lock()
    platforms = Platforms
    preloader = None
    artifact_collector = None

    def __str__(self):
        return str(self.pool)

    def __init__(self, app):
        self.app = app
        self.platforms()
        self.start_workers(app)

    @classmethod
    def start_workers(cls, app):
        cls.app = app
        cls.artifact_collector = ArtifactCollector(cls)
        cls.preloader = VirtualMachinesPoolPreloader(cls)
        cls.preloader.start()

    @classmethod
    def stop_workers(cls):
        if cls.preloader:
            cls.preloader.stop()
        if cls.artifact_collector:
            cls.artifact_collector.close()

    @classmethod
    def remove_vm(cls, vm):
        if vm in list(cls.using):
            try:
                cls.using.remove(vm)
            except ValueError:
                log.warning("VM %s not found in using" % vm.name)
        if vm in list(cls.pool):
            try:
                cls.pool.remove(vm)
            except ValueError:
                log.warning("VM %s not found in pool" % vm.name)

    @classmethod
    def add_vm(cls, vm, to=None):
        if to is None:
            to = cls.pool
        to.append(vm)

    @classmethod
    def free(cls):
        log.info("Deleting using machines...")
        for vm in list(cls.using):
            cls.using.remove(vm)
            vm.delete(try_to_rebuild=False)
        log.info("Deleting pool...")
        for vm in list(cls.pool):
            cls.pool.remove(vm)
            vm.delete(try_to_rebuild=False)
        cls.network.delete()

    @classmethod
    def count(cls):
        return len(cls.pool) + len(cls.using)

    @classmethod
    def can_produce(cls, platform):
        platform_limit = cls.platforms.get_limit(platform)

        if platform_limit is UnlimitedCount:
            return True

        if cls.count() >= platform_limit:
            log.debug('Can\'t produce new virtual machine with platform %s: '
                      'not enough Instances resources' % platform)
            return False
        else:
            return True

    @classmethod
    def has(cls, platform):
        for vm in cls.pool:
            if vm.platform == platform and vm.ready and not vm.checking:
                return True
        return False

    @classmethod
    def get_by_platform(cls, platform):
        res = None

        with cls.lock:
            if not cls.has(platform):
                return None

            for vm in sorted(cls.pool, key=lambda v: v.created, reverse=True):
                if vm.platform == platform and vm.ready and not vm.checking:
                    log.info("Got VM %s (ip=%s, ready=%s, checking=%s)" %
                             (vm.name, vm.ip, vm.ready, vm.checking))
                    cls.pool.remove(vm)
                    cls.using.append(vm)
                    res = vm
                    break

        if not res:
            return None

        if res.ping_vm():
            return res
        else:
            cls.using.remove(res)
            res.delete()
            return None

    @classmethod
    def get_by_name(cls, _name=None):
        # TODO: remove get_by_name
        if _name:
            log.debug('Getting VM: %s' % _name)
            for vm in cls.pool + cls.using:
                if vm.name == _name:
                    return vm

    @classmethod
    def count_virtual_machines(cls, it):
        result = defaultdict(int)
        for vm in it:
            result[vm.platform] += 1

        return result

    @classmethod
    def pooled_virtual_machines(cls):
        return cls.count_virtual_machines(cls.pool)

    @classmethod
    def using_virtual_machines(cls):
        return cls.count_virtual_machines(cls.using)

    @classmethod
    def add(cls, platform, prefix="ondemand", to=None):
        if prefix == "preloaded":
            log.info("Preloading %s." % platform)

        if to is None:
            to = cls.using

        with cls.lock:
            if not cls.can_produce(platform):
                return None

            origin = cls.platforms.get(platform)
            try:
                clone = origin.make_clone(origin, prefix, cls)
            except Exception as e:
                log.exception('Exception during initializing vm object: %s' %
                              e.message)
                return None

            cls.add_vm(clone, to)

        try:
            clone.create()
        except Exception as e:
            log.exception("Error creating vm: %s" % e.message)
            clone.delete()
            try:
                to.remove(clone)
            except ValueError:
                log.warning("VM %s not found while removing" % clone.name)
            return None

        return clone

    @classmethod
    def get_vm(cls, platform):
        vm = cls.get_by_platform(platform)

        if vm:
            return vm

        vm = cls.add(platform)

        if vm:
            return vm

    @classmethod
    def save_artifact(cls, session_id, artifacts):
        return cls.artifact_collector.add_tasks(session_id, artifacts)

    @classmethod
    def preload(cls, origin_name, prefix=None):
        return cls.add(origin_name, prefix, to=cls.pool)

    @classmethod
    def return_vm(cls, vm):
        cls.using.remove(vm)
        cls.pool.append(vm)

    @property
    def info(self):
        def print_view(lst):
            return [{
                "name": vm.name,
                "ip": vm.ip,
                "ready": vm.ready,
                "checking": vm.checking,
                "created": vm.created
            } for vm in lst]

        return {
            "pool": {
                'count': self.pooled_virtual_machines(),
                'list': print_view(self.pool),
            },
            "using": {
                'count': self.using_virtual_machines(),
                'list': print_view(self.using),
            },
            "already_use": self.count(),
        }
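A hypothetical usage sketch built only from the methods defined above; the app object and the platform string are placeholders:

pool = VirtualMachinesPool(app)               # starts preloader and artifact collector
vm = VirtualMachinesPool.get_vm("some-platform")
if vm is not None:
    try:
        pass                                  # run the session on the VM here
    finally:
        VirtualMachinesPool.return_vm(vm)     # hand the clone back to the pool
VirtualMachinesPool.stop_workers()
VirtualMachinesPool.free()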
Example #29
def main(args):
    np.random.seed(0xC0FFEE)

    train, test, dicts = pkl.load( open('datas/atis.pkl', 'r') )
    index2words = {value:key for key, value in dicts['words2idx'].iteritems()}
    index2tables = {value:key for key, value in dicts['tables2idx'].iteritems()}
    index2labels = {value:key for key, value in dicts['labels2idx'].iteritems()}

    datas = [
            {'name':'train', 'x':train[0], 'y':train[2], 'size':len(train[0])},
            {'name':'test',  'x':test[0],  'y':test[2], 'size':len(test[0])},
            ]

    vocsize = len(dicts['words2idx']) + 1
    nclasses = len(dicts['labels2idx'])
    context_window_size = args.window_size

    n = Network()
    # word embedding layer
    n.layers.append( Fullconnect(vocsize, 256,                   Tanh.function, Tanh.derivative) )
    # recurrent layer
    n.layers.append( Recurrent(n.layers[-1].output_size, 256,    ReLU.function, ReLU.derivative) )
    n.layers.append( Dropout(n.layers[-1].output_size, 256, 0.5,  ReLU.function, ReLU.derivative) )
    n.layers.append( Fullconnect(n.layers[-1].output_size, nclasses) )
    n.activation = Softmax(is_zero_pad=True)

    if not os.path.isfile( args.params ):
        logging.error('params file does not exist: %s' % args.params)
        return

    fname = args.params
    n.load_params( pkl.load( open(fname, 'rb') ) )
    logging.info('load parameters at %s'%(fname))


    # prediction setup for evaluation
    for l, layer in enumerate(n.layers):
        if 'Dropout' == type( layer ).__name__:
            n.layers[l].is_testing = True

    data = datas[1]
    max_iteration = data['size']
    results = {'p':[], 'g':[], 'w':[]}
    for i in range(max_iteration):
        idx = i
        x = data['x'][idx]
        labels = data['y'][idx]

        cwords = contextwin(datas[1]['x'][idx], context_window_size)
        words = onehotvector(cwords, vocsize)[0]
        _ = n.predict(words)

        y = [np.argmax(prediction) for prediction in _]

        results['p'].append( [index2tables[_] for _ in y] )
        results['g'].append( [index2tables[_] for _ in labels] )
        results['w'].append( [index2words[_] for _ in x] )

    rv = conlleval(results['p'], results['g'], results['w'], 'atis_test_file.tmp')
    logging.info('evaluation result: %s'%(str(rv)))

    for i in range(20):
        idx = random.randint(0, datas[1]['size']-1)
        x = datas[1]['x'][idx]
        labels = datas[1]['y'][idx]

        cwords = contextwin(datas[1]['x'][idx], context_window_size)
        words = onehotvector(cwords, vocsize)[0]
        _ = n.predict(words)

        y = [np.argmax(prediction) for prediction in _]

        print 'word:   ', ' '.join([index2words[_] for _ in x])
        print 'table:  ', ' '.join([index2tables[_] for _ in labels])
        print 'label:  ', ' '.join([index2labels[_] for _ in labels])
        print 'predict:', ' '.join([index2labels[_] for _ in y])
Example #30
for idx, image_fn in enumerate(image_fns):
    # Upscale images to 300x300
    images[idx] = resize(io.imread(image_fn, as_gray=True), (300, 300),
                         anti_aliasing=True,
                         mode='constant')
    images[idx] = gaussian(grey_closing(images[idx], (7, 7)),
                           1,
                           preserve_range=True)

    # Reverse values to denote distance from camera
    images[idx] = np.max(images[idx]) - images[idx]
    # if idx == 19:
    #     break

for model_fn in MODEL_FNS:
    model_name = model_fn.split('/')[-2]
    rss = Network(model_fn)
    pos, ang, wid = rss.predict(images)

    for idx in range(images.shape[0]):
        output_fn = os.path.join(RESULTS_PATH, model_name, image_ids[idx])
        output_path = '/'.join(output_fn.split('/')[:-1])
        if not os.path.exists(output_path):
            os.makedirs(output_path)
        rss.save_output_plot(images[idx],
                             pos[idx],
                             ang[idx],
                             wid[idx],
                             filename=output_fn)
Example #31
from keras.utils import np_utils

# Load MNIST
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 1, 28 * 28)
x_train = x_train.astype("float32")
x_train /= 255
y_train = np_utils.to_categorical(y_train)

x_test = x_test.reshape(x_test.shape[0], 1, 28 * 28)
x_test = x_test.astype("float32")
x_test /= 255
y_test = np_utils.to_categorical(y_test)

# Model
nn = Network()
nn.add(Dense(28 * 28, 100))
nn.add(Activation(Tanh, dTanh))
nn.add(Dense(100, 50))
nn.add(Activation(Tanh, dTanh))
nn.add(Dense(50, 10))
nn.add(Activation(Tanh, dTanh))

# Training

nn.useLoss(MSE, dMSE)
nn.useOptimizer(RMSProp(), learning_rate=config.learning_rate, beta=config.beta)
nn.fit(x_train[0:2000], y_train[0:2000], epochs=config.epochs)


# Prediction
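The snippet ends at the prediction step. A plausible continuation, assuming this Network exposes a predict() method (an assumption; it is not shown above):

import numpy as np

out = nn.predict(x_test[0:10])
print("predicted:", [int(np.argmax(o)) for o in out])
print("expected: ", [int(np.argmax(y)) for y in y_test[0:10]])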
Example #32
from core.network import Network


with open('snek-learning.yaml', 'r') as f:
    data = yaml.safe_load(f)
    #data = data[:1000]

learning_data = [{'inputs': list(row[:5]), 'outputs': [row[5]]} for row in data]


inputs = InputLayer([1, 1, 1, 1, 1])
middle = Layer(5)
middle3 = Layer(3)
output = OutputLayer([1])

network = Network([inputs, middle, middle3, output], 'snek')
network.connect()
network.load_learning_data(learning_data)

network.print_data()
folds = 4
result = [0]
i = 0
start_time = time()
while sum(result) / folds < 90:
    results = network.learn_kfolds(folds, times=1)
    scores = [(n, [round(x) for x in out] == check) for n, out, check in results]
    stuff = Counter(scores)
    result = [stuff[(i, True)] / (stuff[(i, False)] + stuff[(i, True)]) * 100 for i in range(folds)]
    print("folds results: ", result)
    i += 1
Example #33
learning_data = []

with open('../data/training_data/iris.data', newline='') as csvfile:
    reader = csv.reader(csvfile)
    for row in reader:
        learning_data.append({
            'inputs': [float(x) for x in row[0:4]],
            'outputs': iris_map[row[4]]
        })

inputs = InputLayer([1, 1, 1, 1])
middle = Layer(5)
middle2 = Layer(3)
output = OutputLayer([1, 1, 1])

network = Network([inputs, middle, middle2, output], 'iris')
network.connect()

network.load_learning_data(learning_data)
network.normalize_learning_data()
print("------- learn -------")
network.print_data()

folds = 4
results = network.learn_kfolds(folds, times=1000)
scores = [(n, [round(x) for x in out] == check) for n, out, check in results]
stuff = Counter(scores)
print("folds results: ", [stuff[(i, True)] / (stuff[(i, False)] + stuff[(i, True)]) * 100 for i in range(folds)])

i = [
    [6.9, 3.1, 5.1, 2.3],  # Iris-virginica
Example #34
def main(args):
    np.random.seed(0xC0FFEE)

    logging.info('load data start')
    train_lex, train_y = pkl.load( open('datas/kowiki_spacing_train.pkl', 'r') )
    words2idx = pkl.load( open('datas/kowiki_dict.pkl', 'r') )
    logging.info('load data done')

    index2words = {value:key for key, value in words2idx.iteritems()}

    vocsize = len(words2idx) + 1
    nclasses = 2
    nsentences = len(train_lex)

    context_window_size = args.window_size
    minibatch = args.minibatch
    learning_rate = args.learning_rate
    logging.info('vocsize:%d, nclasses:%d, window-size:%d, minibatch:%d, learning-rate:%.5f'%(vocsize, nclasses, context_window_size, minibatch, learning_rate))

    n = Network()
    n.layers.append( Fullconnect(vocsize, 256, Tanh.function, Tanh.derivative,  updater=GradientDescent(learning_rate)) )
    n.layers.append( Recurrent(256, 256, ReLU.function, ReLU.derivative, updater=GradientDescent(learning_rate)) )
    n.layers.append( Fullconnect(256, 256, ReLU.function, ReLU.derivative, updater=GradientDescent(learning_rate)) )
    n.layers.append( Fullconnect(256, nclasses, updater=GradientDescent(learning_rate)) )
    n.activation = Softmax(is_zero_pad=True)

    if os.path.isfile( args.params ):
        logging.info('load parameters from %s'%args.params)
        n.load_params( pkl.load(open(args.params, 'rb')) )

    logging.info('train start')
    for epoch in xrange(0, args.epoch):
        epoch_loss = 0
        epoch_error_rate = 0
        max_iterations = min(args.samples, nsentences) / minibatch
        for i in xrange( max_iterations ):
            max_size_of_sequence = 100
            idxs = [random.randint(0, nsentences-1) for _ in range(minibatch)]
            cwords = [contextwin(train_lex[idx][:max_size_of_sequence], context_window_size) for idx in idxs]
            words_labels = [onehotvector(cword, vocsize, train_y[idx][:max_size_of_sequence], nclasses) for idx, cword in zip(idxs, cwords)]

            words = [word for word, label in words_labels]
            labels = [label for word, label in words_labels]

            # zero padding for minibatch
            max_size_of_sequence = max( [_.shape[0] for _ in words] )
            for k, (word, label) in enumerate(zip(words, labels)):
                size_of_sequence = word.shape[0]
                words[k]  = np.pad(word,  ((0, max_size_of_sequence-size_of_sequence), (0, 0)), mode='constant')
                labels[k] = np.pad(label, ((0, max_size_of_sequence-size_of_sequence), (0, 0)), mode='constant')

            words  = np.swapaxes( np.array(words),  0, 1 )
            labels = np.swapaxes( np.array(labels), 0, 1 )

            loss = n.train( words, labels ) / (max_size_of_sequence * minibatch) # sequence normalized loss
            predictions = n.y
            error_rate = n.activation.error( predictions, labels ) / (max_size_of_sequence * minibatch)

            epoch_loss += loss
            epoch_error_rate += error_rate
            if i%10 == 0 and i != 0:
                logging.info('[%.4f%%] epoch:%04d iter:%04d loss:%.5f error-rate:%.5f'%((i+1)/float(max_iterations), epoch, i, epoch_loss/(i+1), epoch_error_rate/(i+1)))

        logging.info('epoch:%04d loss:%.5f, error-rate:%.5f'%(epoch, epoch_loss/max_iterations, epoch_error_rate/max_iterations))
        pkl.dump( n.dump_params(), open(args.params, 'wb') )
        logging.info('dump parameters at %s'%(args.params))
    if args.gui:
        import pylab as plt
        plt.figure()

    if args.debug:
        plt.ion()

    sim = Simulator(use_egl=False, gui=args.gui)  # Change to no gui
    sim.cam.pos = [
        0.,
        np.cos(np.deg2rad(args.angle)) * args.distance,
        np.sin(np.deg2rad(args.angle)) * args.distance
    ]
    sim.add_gripper(os.environ['GRIPPER_PATH'])

    net = Network(model_fn=args.network)

    _global_start = time.time()
    for scene_idx in range(len(scenes)):
        try:
            scene_name = scenes_ds['name'][scene_idx]
            logging.debug('Testing scene %s' % scene_name)
            sim.restore(scenes[scene_idx], os.environ['MODELS_PATH'])
            # Get the gripper out of the way so it doesn't interfere with camera
            sim.teleport_to_pose([0., 0., 10.], [0., 0., 0.], 0.)

            _, depth = sim.cam.snap()

            logging.debug('Predicting')
            _start = time.time()