Example #1
import numpy as np
import theanets

# 'u' refers to the suite's shared test utilities (layer-size constants,
# datasets, and the assert_progress helper used throughout these examples).

def test_symbolic_initial_state():
    net = theanets.recurrent.Regressor([
        dict(size=u.NUM_HID1, form='input', name='h0', ndim=2),
        dict(size=u.NUM_INPUTS, form='input', name='in'),
        dict(size=u.NUM_HID1, form='rnn', name='rnn', h_0='h0'),
        dict(size=u.NUM_OUTPUTS, form='ff', name='out'),
    ])
    H0 = np.random.randn(u.NUM_EXAMPLES, u.NUM_HID1).astype('f')
    u.assert_progress(net, [H0, u.RNN.INPUTS, u.RNN.OUTPUTS])
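Every example on this page follows the same pattern: build a model or optimizer, then call an assert_progress helper that trains briefly and checks that the loss actually decreases. The helper itself is not shown on this page; below is a minimal sketch of what it could look like for the theanets models, assuming Network.itertrain yields (train_monitors, valid_monitors) dictionaries containing a 'loss' entry. The function name, the iteration cap, and the keyword handling are illustrative assumptions, not the suite's real code.

import theanets  # the models passed in are theanets Networks

def assert_progress_sketch(net, data, algo='sgd', **kwargs):
    """Train for a few monitored iterations and check that the loss drops."""
    losses = []
    for i, (train_monitors, _) in enumerate(
            net.itertrain(data, algo=algo, **kwargs)):
        losses.append(train_monitors['loss'])
        if i >= 9:  # a handful of monitored iterations is enough for the check
            break
    assert losses[-1] < losses[0], 'training made no progress'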
def test_gll():
    net = theanets.Regressor([
        u.NUM_INPUTS,
        dict(name='hid', size=u.NUM_HID1),
        dict(name='covar', activation='relu', inputs='hid', size=u.NUM_OUTPUTS),
        dict(name='mean', activation='linear', inputs='hid', size=u.NUM_OUTPUTS),
    ])
    net.set_loss('gll', target=2, mean_name='mean', covar_name='covar')
    u.assert_progress(net, u.REG_DATA)
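The 'gll' loss pairs the 'mean' and 'covar' output layers as the mean and (diagonal) variance of a Gaussian over the regression targets. For reference, the quantity minimized is a Gaussian negative log-likelihood of roughly the following shape; this numpy sketch is an assumption about the form of the loss, not theanets' exact implementation.

import numpy as np

def gaussian_nll_sketch(target, mean, covar, eps=1e-8):
    """Diagonal-Gaussian negative log-likelihood, averaged over examples."""
    var = np.maximum(covar, eps)  # the relu 'covar' layer keeps this non-negative
    per_dim = 0.5 * (np.log(2 * np.pi * var) + (target - mean) ** 2 / var)
    return per_dim.sum(axis=-1).mean()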
def test_factor_nesterov(self):
    util.assert_progress(
        *util.build_factor('sgd'),
        max_gradient_norm=1)
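The remaining examples drive downhill optimizers through util.build_rosen and util.build_factor, which construct an optimizer plus training data for, respectively, the classic Rosenbrock function and what appears to be a small matrix-factorization problem. Those helpers are not reproduced here; the sketch below shows the general shape of the Rosenbrock case, assuming downhill.build(algo, loss=...) discovers the shared variable as the parameter to optimize and Optimizer.iterate() yields monitor dictionaries with a 'loss' key. The helper names and the empty-batch dataset are assumptions, not downhill's actual test code.

import numpy as np
import theano
import downhill

def build_rosen_sketch(algo):
    """Optimizer for the 2-D Rosenbrock function, plus a trivial dataset."""
    x = theano.shared(np.array([-1.1, 0.0], dtype='f'), name='x')
    loss = (100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum()
    # The loss has no symbolic inputs, so the 'dataset' is a single empty batch.
    return downhill.build(algo, loss=loss), [[]]

def assert_progress_sketch(opt, train, **kwargs):
    """Run a few optimization steps and check that the loss goes down."""
    losses = []
    for i, (train_monitors, _) in enumerate(opt.iterate(train, **kwargs)):
        losses.append(train_monitors['loss'])
        if i >= 9:
            break
    assert losses[-1] < losses[0]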
Example #4
def test_factor(self):
    util.assert_progress(*util.build_factor('sgd'),
                         max_gradient_elem=1,
                         nesterov=False)
Example #5
def test_factor(self):
    util.assert_progress(*util.build_factor('nag'), max_gradient_elem=1)
Example #6
def test_factor_nesterov(self):
    util.assert_progress(*util.build_factor('sgd'), max_gradient_norm=1)
Example #7
def test_rosen(self):
    util.assert_progress(*util.build_rosen('esgd'), learning_rate=1e-6)
Example #8
def test_rosen(self):
    util.assert_progress(*util.build_rosen('adadelta'))
Example #9
def test_rosen(self):
    util.assert_progress(*util.build_rosen('rmsprop'))
def test_layerwise(ae):
    u.assert_progress(ae, u.AE_DATA, algo='layerwise')
def test_downhill(ae):
    # this really tests that interaction with downhill works.
    u.assert_progress(ae, u.AE_DATA)
def test_sgd(Model, layers, weighted, data):
    u.assert_progress(Model(layers, weighted=weighted), data)
def test_kl():
    net = theanets.Regressor([
        u.NUM_INPUTS, u.NUM_HID1, (u.NUM_OUTPUTS, 'softmax')], loss='kl')
    u.assert_progress(net, [u.INPUTS, abs(u.OUTPUTS)])
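The 'kl' loss compares the network's softmax output against the targets as if both were (unnormalized) distributions, which is why the targets are passed through abs() to keep them non-negative. A small numpy sketch of the divergence involved, assuming the standard KL form; the exact normalization theanets applies is not shown here.

import numpy as np

def kl_loss_sketch(targets, outputs, eps=1e-8):
    """Mean KL divergence between non-negative targets and softmax outputs."""
    t = np.abs(targets)               # mirrors the abs() applied to u.OUTPUTS
    y = np.maximum(outputs, eps)      # softmax outputs, clipped away from zero
    return (t * np.log((t + eps) / y)).sum(axis=-1).mean()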
def test_regression(loss):
    net = theanets.Regressor([
        u.NUM_INPUTS, u.NUM_HID1, u.NUM_OUTPUTS], loss=loss)
    u.assert_progress(net, u.REG_DATA)
def test_layerwise_tied():
    ae = theanets.Autoencoder([
        u.NUM_INPUTS, u.NUM_HID1, u.NUM_HID2, (u.NUM_HID1, 'tied'),
        (u.NUM_INPUTS, 'tied')
    ])
    u.assert_progress(ae, u.AE_DATA, algo='layerwise')
def test_classification(loss):
    net = theanets.Classifier([
        u.NUM_INPUTS, u.NUM_HID1, u.NUM_CLASSES], loss=loss)
    u.assert_progress(net, u.CLF_DATA)
def test_unsupervised_pretrainer():
    u.assert_progress(theanets.Experiment(theanets.Classifier, u.CLF_LAYERS),
                      u.AE_DATA,
                      algo='pretrain')
Example #19
def test_factor(self):
    util.assert_progress(*util.build_factor('rmsprop'))
Example #20
def test_rosen(self):
    util.assert_progress(*util.build_rosen('esgd'), learning_rate=1e-6)
Example #21
def test_factor(self):
    util.assert_progress(*util.build_factor('adam'))
Example #22
def test_rosen(self):
    util.assert_progress(*util.build_rosen('adagrad'))
Example #23
def test_factor(self):
    util.assert_progress(*util.build_factor('esgd'), learning_rate=1e-6)
Example #24
def test_factor(self):
    util.assert_progress(*util.build_factor('adagrad'))
Example #25
def test_rosen(self):
    util.assert_progress(*util.build_rosen('nag'))
Example #26
def test_factor(self):
    util.assert_progress(*util.build_factor('esgd'), learning_rate=1e-6)
Example #27
def test_rosen(self):
    util.assert_progress(*util.build_rosen('sgd'), monitor_gradients=True)
Example #28
def test_rosen(self):
    util.assert_progress(*util.build_rosen('rmsprop'))
Example #30
def test_factor(self):
    util.assert_progress(*util.build_factor('rmsprop'))
Example #34
def test_sgd(Model, layers, sparse, weighted, data):
    u.assert_progress(Model(layers[sparse], weighted=weighted), data[sparse])