Example #1
def test_layer_tuples(self):
    m = theanets.Regressor((1, (2, 'relu'), 3))
    assert len(m.layers) == 3
    assert m.layers[1].kwargs['activation'] == 'relu'
Example #2
def test_layer_dicts(self):
    m = theanets.Regressor((1, dict(size=2, activation='relu',
                                    form='rnn'), 3))
    assert len(m.layers) == 3
    assert m.layers[1].kwargs['activation'] == 'relu'
    assert isinstance(m.layers[1], theanets.layers.recurrent.RNN)
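Taken together with the plain-integer form in Example #4 below, these are three interchangeable ways of spelling a hidden layer. A minimal sketch (variable names are illustrative): a bare integer uses the default activation, the tuple and dict forms set it explicitly, and the dict can additionally choose the layer form.

import theanets

a = theanets.Regressor((1, 2, 3))                                # bare size, default activation
b = theanets.Regressor((1, (2, 'relu'), 3))                      # (size, activation) tuple
c = theanets.Regressor((1, dict(size=2, activation='relu'), 3))  # full dict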
Example #3
def test_updates(self):
    # A plain feedforward Regressor carries no stateful graph updates.
    m = theanets.Regressor((15, 13))
    assert not m.updates()
Example #4
def test_layer_ints(self):
    m = theanets.Regressor((1, 2, 3))
    assert len(m.layers) == 3
Example #5
def _build(self, *hiddens, **kwargs):
    return theanets.Regressor(layers=(self.DIGIT_SIZE, ) + hiddens,
                              hidden_activation='logistic',
                              **kwargs)
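A hypothetical call to this helper (the hidden sizes here are made up) builds a logistic-activation regressor whose input layer matches DIGIT_SIZE:

net = self._build(64, 32)  # layers: (self.DIGIT_SIZE, 64, 32)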
Example #6
def net(self):
    return theanets.Regressor((10, 15, 14, 13))
Example #7
def test_feed_forward(self):
    net = theanets.Regressor(
        (self.NUM_INPUTS, self.a, self.b, self.l, self.NUM_OUTPUTS))
    out = net.predict(self.INPUTS)
    assert out.shape == (self.NUM_EXAMPLES, self.NUM_OUTPUTS)
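Filling in concrete (hypothetical) sizes makes the shape contract explicit: an array of NUM_EXAMPLES rows with NUM_INPUTS columns comes back as NUM_EXAMPLES rows of NUM_OUTPUTS predictions.

import numpy as np
import theanets

# Hypothetical sizes: 4 inputs, three hidden layers, 2 outputs.
net = theanets.Regressor((4, 5, 6, 7, 2))
out = net.predict(np.random.randn(8, 4).astype('f'))
assert out.shape == (8, 2)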
Example #8
import numpy as np
import theanets

# Rebuild the network with the layer sizes used at training time, then
# restore the saved parameters.
net = theanets.Regressor([353, 150, 1])
net.load('model_regressor_nomen_final')

#inputs = np.loadtxt('vector_list_usable_1')
#outputs = np.loadtxt('top_or_not_usable_binary_1')

# Feature vectors to run through the trained model.
g = np.loadtxt('vector_nomen_10')

#h = np.random.randn(34308, 1).astype('f')
#h = np.loadtxt('top_or_not_usable_binary_9')
#w = [h[x:x+1] for x in range(0, len(h), 1)]  # convert into list of numpy arrays
#q = np.asarray(w)  # convert into numpy array of numpy arrays

#net.train([g, q])

#net.save('model_regressor_nomen_final')

# Predict an output for every input vector and write the results to disk.
result = net.predict(g)
#score = net.score()

np.savetxt('result_regressor_nomen', result)
#np.savetxt('score', score)

#test = np.loadtxt('vector_list_usable_9')
#print(test)
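The commented-out lines above sketch the training pass that produced the saved model. A minimal runnable version with synthetic stand-in data (the shapes, algorithm name, and hyperparameters are assumptions, following the theanets 0.7-style train API):

import numpy as np
import theanets

# Synthetic stand-ins for the real data files.
inputs = np.random.randn(100, 353).astype('f')
targets = np.random.randn(100, 1).astype('f')

net = theanets.Regressor([353, 150, 1])
net.train([inputs, targets], algo='sgd', learning_rate=1e-3)
net.save('model_regressor_nomen_final')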
Example #9
def test_kl():
    # KL-divergence loss needs nonnegative targets (hence abs() on the
    # outputs); the softmax output layer yields a proper distribution.
    net = theanets.Regressor([
        u.NUM_INPUTS, u.NUM_HID1, (u.NUM_OUTPUTS, 'softmax')], loss='kl')
    u.assert_progress(net, [u.INPUTS, abs(u.OUTPUTS)])
Example #10
def test_regression(loss):
    net = theanets.Regressor([
        u.NUM_INPUTS, u.NUM_HID1, u.NUM_OUTPUTS], loss=loss)
    u.assert_progress(net, u.REG_DATA)
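The loss argument is presumably filled in by a pytest parametrization over theanets' loss names; called directly it might look like:

test_regression('mse')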
Example #11
def test_kl(self):
    self.exp = theanets.Regressor(
        [self.NUM_INPUTS, 10, (self.NUM_OUTPUTS, 'softmax')], loss='kl')
    assert self.exp.losses[
        0].__class__.__name__ == 'KullbackLeiblerDivergence'
    self.assert_progress('sgd', [self.INPUTS, abs(self.OUTPUTS)])