Example #1
 def test_SquareKernel(self):
     self._run(
         MLPR(layers=[
             C("Rectifier", channels=4, kernel_shape=(3, 3)),
             L("Linear")
         ],
              n_iter=1))
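The snippets below read like test methods excerpted from scikit-neuralnetwork's test suite. They omit the enclosing unittest.TestCase classes, helper members such as self._run, self.output, self.buf, self.a_in and self.a_out, and the shared imports. A minimal preamble that would make the names resolve, assuming the usual sknn aliases (MLPR, L, C and N are inferred from usage, not shown in the original):

    import io
    import numpy

    from nose.tools import assert_equal, assert_in, assert_true
    from sknn.mlp import Regressor as MLPR, Layer as L, Convolution as C, Native as N

    # Only the GridSearchCV / cross_val_score examples need scikit-learn;
    # older releases exposed these under sklearn.grid_search and
    # sklearn.cross_validation instead.
    from sklearn.model_selection import GridSearchCV, cross_val_score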
Example #2
 def test_DropoutPerLayer(self):
     nn = MLPR(layers=[L("Rectifier", units=8, dropout=0.25), L("Linear")],
               regularize='dropout',
               n_iter=1)
     assert_equal(nn.regularize, 'dropout')
     self._run(nn)
     assert_in('Using `dropout` for regularization.', self.output.getvalue())
Example #3
 def test_DropoutExplicit(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               regularize='dropout',
               n_iter=1)
     assert_equal(nn.regularize, 'dropout')
     self._run(nn)
     assert_true(nn.cost is not None)
Example #4
 def test_RegularizeCustomParam(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               weight_decay=0.01,
               n_iter=1)
     assert_equal(nn.weight_decay, 0.01)
     self._run(nn)
     assert_true(nn.cost is not None)
Example #5
 def test_VerticalKernel(self):
     self._run(
         MLPR(layers=[
             C("Rectifier", channels=4, kernel_shape=(16, 1)),
             L("Linear")
         ],
              n_iter=1))
Example #6
    def run_EqualityTest(self, copier, asserter):
        for activation in ["Rectifier", "Sigmoid", "Maxout", "Tanh"]:
            nn1 = MLPR(layers=[L(activation, units=16, pieces=2), L("Linear", units=1)], random_state=1234)
            nn1._initialize(self.a_in, self.a_out)

            nn2 = copier(nn1, activation)
            asserter(numpy.all(nn1.predict(self.a_in) == nn2.predict(self.a_in)))
Example #7
 def test_RegularizeExplicitL2(self):
     nn = MLPR(layers=[L("Sigmoid", units=8), L("Softmax",)],
               regularize='L2',
               n_iter=1)
     assert_equal(nn.regularize, 'L2')
     self._run(nn)
     assert_in('Using `L2` for regularization.', self.output.getvalue())
Example #8
 def test_RegularizeExplicitL1(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               regularize='L1',
               n_iter=1)
     assert_equal(nn.regularize, 'L1')
     self._run(nn)
     assert_in('Using `L1` for regularization.', self.output.getvalue())
Example #9
 def test_DropoutPerLayer(self):
     nn = MLPR(layers=[L("Maxout", units=8, pieces=2, dropout=0.25), L("Linear")],
               regularize='dropout',
               n_iter=1)
     assert_equal(nn.regularize, 'dropout')
     self._run(nn)
     assert_true(nn.cost is not None)
Example #10
 def test_RegularizeCustomParam(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               weight_decay=0.01,
               n_iter=1)
     assert_equal(nn.weight_decay, 0.01)
     self._run(nn)
     assert_in('Using `L2` for regularization.', self.output.getvalue())
Example #11
 def test_LeakyRectifier(self):
     nn = MLPR(layers=[
         N(ly.DenseLayer, units=24, nonlinearity=nl.leaky_rectify),
         L("Linear")
     ],
               n_iter=1)
     self._run(nn)
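The Native (N) layer wraps a backend layer class directly. The ly and nl aliases are never defined in these excerpts; given DenseLayer and leaky_rectify, they presumably point at the Lasagne modules:

    import lasagne.layers as ly
    import lasagne.nonlinearities as nl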
Example #12
 def test_DropoutExplicit(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               regularize='dropout',
               n_iter=1)
     assert_equal(nn.regularize, 'dropout')
     self._run(nn)
     assert_in('Using `dropout` for regularization.', self.output.getvalue())
Example #13
    def test_UnusedParameterWarning(self):
        nn = MLPR(layers=[L("Linear", pieces=2)], n_iter=1)
        a_in = numpy.zeros((8, 16))
        nn._initialize(a_in, a_in)

        assert_in('Parameter `pieces` is unused', self.buf.getvalue())
        self.buf = io.StringIO()  # clear
Example #14
    def test_RegressorGlobalParams(self):
        a_in = numpy.random.uniform(0.0, 1.0, (64, 16))
        a_out = numpy.zeros((64, 1))

        clf = GridSearchCV(MLPR(layers=[L("Linear")], n_iter=1),
                           param_grid={'learning_rate': [0.01, 0.001]})
        clf.fit(a_in, a_out)
Example #15
 def test_VerboseRegressor(self):
     nn = MLPR(layers=[L("Linear")], verbose=1, n_iter=1)
     a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
     nn.fit(a_in, a_out)
     assert_in("Epoch       Training Error       Validation Error       Time", self.buf.getvalue())
     assert_in("    1       ", self.buf.getvalue())
     assert_in("    N/A     ", self.buf.getvalue())
Example #16
 def test_HorizontalKernel(self):
     self._run(
         MLPR(layers=[
             C("Rectifier", channels=4, kernel_shape=(1, 16)),
             L("Linear")
         ],
              n_iter=1))
Example #17
    def test_Regressor(self):
        a_in = numpy.random.uniform(0.0, 1.0, (64, 16))
        a_out = numpy.zeros((64, 1))

        cross_val_score(MLPR(layers=[L("Linear")], n_iter=1),
                        a_in,
                        a_out,
                        cv=5)
Example #18
 def test_ValidationSet(self):
     self._run(
         MLPR(layers=[
             C("Rectifier", channels=4, kernel_shape=(3, 3)),
             L("Linear")
         ],
              n_iter=1,
              valid_size=0.5))
Example #19
 def test_AutomaticRegularize(self):
     nn = MLPR(
         layers=[L("Tanh", units=8, weight_decay=0.0001),
                 L("Linear")],
         n_iter=1)
     self._run(nn)
     assert_in('Using `L2` for regularization, auto-enabled from layers.',
               self.output.getvalue())
Example #20
 def ctor(_, activation):
     nn = MLPR(layers=[
         L(activation, units=16, pieces=2),
         L("Linear", units=1)
     ],
               random_state=1234)
     nn._initialize(self.a_in, self.a_out)
     return nn
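ctor here is shaped to serve as the copier argument of run_EqualityTest from Example #6: the first parameter (named _) receives the already-initialized network and is deliberately ignored, while self is captured from the enclosing test method. A hypothetical call wiring the two together:

    # Rebuild a fresh network per activation and assert that, with the same
    # random_state and topology, its predictions match the original's.
    self.run_EqualityTest(ctor, assert_true)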
Example #21
    def setUp(self):
        self.called = 0
        self.value = 1.0

        self.nn = MLPR(layers=[L("Linear")],
                       n_iter=1,
                       batch_size=1,
                       callback={'on_batch_start': self._mutate_fn})
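The on_batch_start callback references a _mutate_fn helper the excerpt never shows. Judging by the called and value counters set up above, a plausible sketch (an assumption, not the original code) is:

    def _mutate_fn(self, **variables):
        # Record each batch-start invocation; the exact keyword arguments
        # sknn passes to the callback are not visible in this excerpt.
        self.called += 1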
Example #22
 def setUp(self):
     self.nn = MLPR(layers=[
         C("Rectifier", channels=6, kernel_shape=(3, 3)),
         C("Sigmoid", channels=4, kernel_shape=(5, 5)),
         C("Tanh", channels=8, kernel_shape=(3, 3)),
         L("Linear")
     ],
                    n_iter=1)
Example #23
 def test_GaussianNoise(self):
     nn = MLPR(layers=[
         L("Rectifier", units=12),
         N(ly.GaussianNoiseLayer),
         L("Linear")
     ],
               n_iter=1)
     self._run(nn)
Example #24
 def test_DropoutAsFloat(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               dropout_rate=0.25,
               n_iter=1)
     assert_equal(nn.regularize, 'dropout')
     assert_equal(nn.dropout_rate, 0.25)
     self._run(nn)
     assert_in('Using `dropout` for regularization.', self.output.getvalue())
Example #25
 def test_DropoutAsFloat(self):
     nn = MLPR(layers=[L("Tanh", units=8), L("Linear",)],
               dropout_rate=0.25,
               n_iter=1)
     assert_equal(nn.regularize, 'dropout')
     assert_equal(nn.dropout_rate, 0.25)
     self._run(nn)
     assert_true(nn.cost is not None)
Example #26
    def test_GetParamsThenConstructor(self):
        nn1 = MLPR(layers=[L("Linear")], n_iter=1)
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn1._initialize(a_in, a_out)

        p1 = nn1.get_parameters()
        print(len(p1))
        nn2 = MLPR(layers=[L("Linear")], n_iter=1, parameters=p1)
        nn2._initialize(a_in, a_out)
        p2 = nn2.get_parameters()
        print(len(p2))

        assert_true(
            (p1[0].weights.astype('float32') == p2[0].weights.astype('float32')
             ).all())
        assert_true(
            (p1[0].biases.astype('float32') == p2[0].biases.astype('float32')
             ).all())
Example #27
 def setUp(self):
     self.nn = MLPR(layers=[
         L("Rectifier", units=16),
         L("Sigmoid", units=12),
         L("Maxout", units=16, pieces=2),
         L("Tanh", units=4),
         L("Linear")
     ],
                    n_iter=1)
Example #28
 def test_VerticalVerbose(self):
     self._run(
         MLPR(layers=[
             C("Rectifier", channels=4, kernel_shape=(16, 1)),
             L("Linear")
         ],
              n_iter=1,
              verbose=1,
              valid_size=0.1))
Example #29
 def test_MultipleLayers(self):
     self._run(
         MLPR(layers=[
             C("Rectifier", channels=6, kernel_shape=(3, 3)),
             C("Rectifier", channels=4, kernel_shape=(5, 5)),
             C("Rectifier", channels=8, kernel_shape=(3, 3)),
             L("Linear")
         ],
              n_iter=1))
Example #30
 def setUp(self):
     self.nn = MLPR(
         layers=[
             L("Rectifier", units=16),
             L("Sigmoid", units=12),
             L("ExpLin", units=8),
             L("Tanh", units=4),
             L("Linear")],
         n_iter=1)