예제 #1
0
def test_symbolic_initial_state():
    """An RNN's initial hidden state can be fed in as a symbolic input layer."""
    layers = [
        dict(size=u.NUM_INPUTS, form='input', name='h0', ndim=2),
        dict(size=u.NUM_INPUTS, form='input', name='in'),
        dict(size=u.NUM_HID1, form='rnn', name='rnn', h_0='h0'),
        dict(size=u.NUM_OUTPUTS, form='ff', name='out'),
    ]
    net = theanets.recurrent.Regressor(layers)
    # One initial-state row per training example, as float32.
    initial = np.random.randn(u.NUM_EXAMPLES, u.NUM_HID1).astype('f')
    u.assert_progress(net, [initial, u.RNN.INPUTS, u.RNN.OUTPUTS])
예제 #2
0
def test_gll():
    """Training with the Gaussian log-likelihood loss should make progress."""
    hidden = dict(name='hid', size=u.NUM_HID1)
    covar = dict(name='covar', activation='relu', inputs='hid', size=u.NUM_OUTPUTS)
    mean = dict(name='mean', activation='linear', inputs='hid', size=u.NUM_OUTPUTS)
    net = theanets.Regressor([u.NUM_INPUTS, hidden, covar, mean])
    # The GLL loss reads its mean and covariance from the two named layers.
    net.set_loss('gll', target=2, mean_name='mean', covar_name='covar')
    u.assert_progress(net, u.REG_DATA)
예제 #3
0
 def test_factor_nesterov(self):
     """SGD with a gradient-norm cap makes progress on the factor task."""
     setup = util.build_factor('sgd')
     util.assert_progress(*setup, max_gradient_norm=1)
예제 #4
0
 def test_factor(self):
     """Plain (non-Nesterov) SGD with elementwise gradient clipping works."""
     setup = util.build_factor('sgd')
     util.assert_progress(*setup, max_gradient_elem=1, nesterov=False)
예제 #5
0
 def test_factor(self):
     """NAG with elementwise gradient clipping improves the factorization."""
     setup = util.build_factor('nag')
     util.assert_progress(*setup, max_gradient_elem=1)
예제 #6
0
 def test_factor_nesterov(self):
     """SGD constrained by a gradient-norm cap still makes progress."""
     setup = util.build_factor('sgd')
     util.assert_progress(*setup, max_gradient_norm=1)
예제 #7
0
 def test_rosen(self):
     """ESGD needs a tiny learning rate to descend the Rosenbrock surface."""
     setup = util.build_rosen('esgd')
     util.assert_progress(*setup, learning_rate=1e-6)
예제 #8
0
 def test_rosen(self):
     """ADADELTA makes progress on the Rosenbrock problem with defaults."""
     setup = util.build_rosen('adadelta')
     util.assert_progress(*setup)
예제 #9
0
 def test_rosen(self):
     """RMSProp makes progress on the Rosenbrock problem with defaults."""
     setup = util.build_rosen('rmsprop')
     util.assert_progress(*setup)
예제 #10
0
def test_layerwise(ae):
    """Layerwise pretraining should reduce the autoencoder's loss."""
    data = u.AE_DATA
    u.assert_progress(ae, data, algo='layerwise')
예제 #11
0
def test_downhill(ae):
    """Sanity check: the downhill optimizer integration trains the model."""
    data = u.AE_DATA
    u.assert_progress(ae, data)
예제 #12
0
def test_sgd(Model, layers, weighted, data):
    """SGD training makes progress for every model/weighting combination."""
    model = Model(layers, weighted=weighted)
    u.assert_progress(model, data)
예제 #13
0
def test_kl():
    """A softmax regressor trained with the KL-divergence loss improves."""
    net = theanets.Regressor(
        [u.NUM_INPUTS, u.NUM_HID1, (u.NUM_OUTPUTS, 'softmax')], loss='kl')
    # KL targets must be non-negative, hence the abs().
    targets = abs(u.OUTPUTS)
    u.assert_progress(net, [u.INPUTS, targets])
예제 #14
0
def test_regression(loss):
    """Each supported regression loss should decrease during training."""
    model = theanets.Regressor(
        [u.NUM_INPUTS, u.NUM_HID1, u.NUM_OUTPUTS], loss=loss)
    u.assert_progress(model, u.REG_DATA)
예제 #15
0
 def test_factor(self):
     """NAG with per-element gradient clipping improves the factorization."""
     setup = util.build_factor('nag')
     util.assert_progress(*setup, max_gradient_elem=1)
예제 #16
0
def test_layerwise_tied():
    """Layerwise pretraining works on an autoencoder with tied weights."""
    layers = [
        u.NUM_INPUTS,
        u.NUM_HID1,
        u.NUM_HID2,
        (u.NUM_HID1, 'tied'),
        (u.NUM_INPUTS, 'tied'),
    ]
    ae = theanets.Autoencoder(layers)
    u.assert_progress(ae, u.AE_DATA, algo='layerwise')
예제 #17
0
def test_classification(loss):
    """Each supported classification loss should decrease during training."""
    model = theanets.Classifier(
        [u.NUM_INPUTS, u.NUM_HID1, u.NUM_CLASSES], loss=loss)
    u.assert_progress(model, u.CLF_DATA)
예제 #18
0
def test_unsupervised_pretrainer():
    """Unsupervised pretraining of a classifier should make progress."""
    experiment = theanets.Experiment(theanets.Classifier, u.CLF_LAYERS)
    u.assert_progress(experiment, u.AE_DATA, algo='pretrain')
예제 #19
0
 def test_factor(self):
     """RMSProp with default settings improves the factorization task."""
     setup = util.build_factor('rmsprop')
     util.assert_progress(*setup)
예제 #20
0
 def test_rosen(self):
     """ESGD on Rosenbrock requires a very small learning rate to converge."""
     setup = util.build_rosen('esgd')
     util.assert_progress(*setup, learning_rate=1e-6)
예제 #21
0
 def test_factor(self):
     """Adam with default settings improves the factorization task."""
     setup = util.build_factor('adam')
     util.assert_progress(*setup)
예제 #22
0
 def test_rosen(self):
     """AdaGrad makes progress on the Rosenbrock problem with defaults."""
     setup = util.build_rosen('adagrad')
     util.assert_progress(*setup)
예제 #23
0
 def test_factor(self):
     """ESGD needs a tiny learning rate to improve the factorization."""
     setup = util.build_factor('esgd')
     util.assert_progress(*setup, learning_rate=1e-6)
예제 #24
0
 def test_factor(self):
     """AdaGrad with default settings improves the factorization task."""
     setup = util.build_factor('adagrad')
     util.assert_progress(*setup)
예제 #25
0
 def test_rosen(self):
     """NAG makes progress on the Rosenbrock problem with defaults."""
     setup = util.build_rosen('nag')
     util.assert_progress(*setup)
예제 #26
0
 def test_factor(self):
     """ESGD improves the factorization given a sufficiently small step."""
     setup = util.build_factor('esgd')
     util.assert_progress(*setup, learning_rate=1e-6)
예제 #27
0
 def test_rosen(self):
     """SGD on Rosenbrock makes progress; also exercise gradient monitors."""
     setup = util.build_rosen('sgd')
     util.assert_progress(*setup, monitor_gradients=True)
예제 #28
0
 def test_rosen(self):
     """RMSProp with default settings descends the Rosenbrock surface."""
     setup = util.build_rosen('rmsprop')
     util.assert_progress(*setup)
예제 #29
0
 def test_factor(self):
     """Non-Nesterov SGD with elementwise clipping improves factorization."""
     setup = util.build_factor('sgd')
     util.assert_progress(*setup, max_gradient_elem=1, nesterov=False)
예제 #30
0
 def test_factor(self):
     """RMSProp improves the factorization task using default settings."""
     setup = util.build_factor('rmsprop')
     util.assert_progress(*setup)
예제 #31
0
 def test_rosen(self):
     """NAG with default settings descends the Rosenbrock surface."""
     setup = util.build_rosen('nag')
     util.assert_progress(*setup)
예제 #32
0
def test_sgd(Model, layers, weighted, data):
    """SGD makes progress for each model class, weighted or not."""
    model = Model(layers, weighted=weighted)
    u.assert_progress(model, data)
예제 #33
0
 def test_rosen(self):
     """SGD descends the Rosenbrock surface while monitoring gradients."""
     setup = util.build_rosen('sgd')
     util.assert_progress(*setup, monitor_gradients=True)
예제 #34
0
def test_sgd(Model, layers, sparse, weighted, data):
    """SGD makes progress for sparse/dense inputs with optional weights."""
    model = Model(layers[sparse], weighted=weighted)
    u.assert_progress(model, data[sparse])