Example #1
    def test_UnusedParameterWarning(self):
        nn = MLPR(layers=[L("Linear", pieces=2)], n_iter=1)
        a_in = numpy.zeros((8, 16))
        nn._initialize(a_in, a_in)

        assert_in('Parameter `pieces` is unused', self.buf.getvalue())
        self.buf = io.StringIO()  # clear
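This warning test reads from self.buf, which the snippet never defines. A minimal sketch of the fixture it appears to assume, routing the sknn logger into a StringIO (the handler wiring and attribute names here are assumptions, not shown on this page; requires import io and import logging):

    def setUp(self):
        # Assumed fixture: capture sknn's log output in a buffer so the
        # test can assert on messages like 'Parameter `pieces` is unused'.
        self.buf = io.StringIO()
        self.hnd = logging.StreamHandler(self.buf)
        logging.getLogger('sknn').addHandler(self.hnd)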
class TestSerializedNetwork(TestLinearNetwork):
    def setUp(self):
        self.original = MLPR(layers=[L("Linear")])
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        self.original._initialize(a_in, a_out)

        buf = io.BytesIO()
        pickle.dump(self.original, buf)
        buf.seek(0)
        self.nn = pickle.load(buf)

    def test_TypeOfWeightsArray(self):
        for w, b in self.nn._mlp_to_array():
            assert_equal(type(w), numpy.ndarray)
            assert_equal(type(b), numpy.ndarray)

    # Override the base class tests; you currently can't re-train a network
    # that was serialized and deserialized.
    def test_FitAutoInitialize(self):
        pass

    def test_ResizeInputFrom4D(self):
        pass

    def test_ResizeInputFrom3D(self):
        pass

    def test_PredictNoOutputUnitsAssertion(self):
        # Override the base class test: unlike there, a deserialized network
        # is already initialized, so predict works without raising an assert.
        assert_true(self.nn.is_initialized)

    def test_PredictAlreadyInitialized(self):
        a_in = numpy.zeros((8, 16))
        self.nn.predict(a_in)
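As the round-trip above shows, an initialized MLPR can be persisted with the standard pickle module; note from the overridden tests above that re-training after deserialization is not currently supported. A minimal usage sketch (the file name and variables are placeholders):

    import pickle

    with open('model.pkl', 'wb') as f:
        pickle.dump(nn, f)
    with open('model.pkl', 'rb') as f:
        restored = pickle.load(f)
    predictions = restored.predict(a_in)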
    def run_EqualityTest(self, copier, asserter):
        for activation in ["Rectifier", "Sigmoid", "Maxout", "Tanh"]:
            nn1 = MLPR(layers=[L(activation, units=16, pieces=2), L("Linear", units=1)], random_state=1234)
            nn1._initialize(self.a_in, self.a_out)

            nn2 = copier(nn1, activation)
            asserter(numpy.all(nn1.predict(self.a_in) == nn2.predict(self.a_in)))
Example #7
    def ctor(_, activation):
        # Copier that ignores the source network and rebuilds one with the
        # same seed; `self` is captured from the enclosing test method.
        nn = MLPR(layers=[L(activation, units=16, pieces=2),
                          L("Linear", units=1)],
                  random_state=1234)
        nn._initialize(self.a_in, self.a_out)
        return nn
    def test_MultiLayerPooling(self):
        nn = MLPR(layers=[
            C("Rectifier", channels=4, kernel_shape=(3, 3), pool_shape=(2, 2)),
            C("Rectifier", channels=4, kernel_shape=(3, 3), pool_shape=(2, 2)),
            L("Linear")])

        a_in, a_out = numpy.zeros((8, 32, 32, 1)), numpy.zeros((8, 16))
        nn._initialize(a_in, a_out)
        assert_equal(nn.unit_counts, [1024, 900, 196, 16])
Example #10
    def test_SetParametersConstructor(self):
        weights = numpy.random.uniform(-1.0, +1.0, (16, 4))
        biases = numpy.random.uniform(-1.0, +1.0, (4, ))
        nn = MLPR(layers=[L("Linear")], parameters=[(weights, biases)])

        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn._initialize(a_in, a_out)
        assert_in('Reloading parameters for 1 layer weights and biases.',
                  self.buf.getvalue())
Example #11
    def test_MultiLayerPooling(self):
        nn = MLPR(layers=[
            C("Rectifier", channels=4, kernel_shape=(3, 3), pool_shape=(2, 2)),
            C("ExpLin", channels=4, kernel_shape=(3, 3), pool_shape=(2, 2)),
            L("Linear")
        ])

        a_in, a_out = numpy.zeros((8, 32, 32, 1)), numpy.zeros((8, 16))
        nn._initialize(a_in, a_out)
        # 32x32x1 input = 1024 units; a valid 3x3 convolution gives 30x30,
        # and 2x2 pooling gives 15x15x4 = 900; the second layer gives 13x13,
        # pooled (rounding up) to 7x7x4 = 196; the output layer has 16.
        assert_equal(nn.unit_counts, [1024, 900, 196, 16])
Example #12
    def make(self, activation, seed=1234, train=False, **keywords):
        nn = MLPR(layers=[L(activation, units=16, **keywords),
                          L("Linear", units=1)],
                  random_state=seed, n_iter=1)
        if train:
            nn.fit(self.a_in, self.a_out)
        else:
            nn._initialize(self.a_in, self.a_out)
        return nn
Example #13
    def make(self, activation, seed=1234, train=False, **keywords):
        nn = MLPR(layers=[C(activation, channels=16, kernel_shape=(3, 3), **keywords),
                          L("Linear")],
                  random_state=seed, n_iter=1)
        if train:
            nn.fit(self.a_in, self.a_out)
        else:
            nn._initialize(self.a_in, self.a_out)
        return nn
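A factory like make is presumably called from per-activation tests; a hypothetical caller (the test name and assertion are illustrative, not taken from this page):

    def test_RectifierConvolution(self):
        nn = self.make("Rectifier", train=True)
        assert_true(nn.is_initialized)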
    def test_SetLayerParamsDict(self):
        nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='abcd')])
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn._initialize(a_in, a_out)

        weights = numpy.random.uniform(-1.0, +1.0, (32, 4))
        biases = numpy.random.uniform(-1.0, +1.0, (4,))
        nn.set_parameters({'abcd': (weights, biases)})

        p = nn.get_parameters()
        assert_true((p[1].weights.astype('float32') == weights.astype('float32')).all())
        assert_true((p[1].biases.astype('float32') == biases.astype('float32')).all())
    def test_SetLayerParamsList(self):
        nn = MLPR(layers=[L("Linear")])
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn._initialize(a_in, a_out)

        weights = numpy.random.uniform(-1.0, +1.0, (16, 4))
        biases = numpy.random.uniform(-1.0, +1.0, (4,))
        nn.set_parameters([(weights, biases)])

        p = nn.get_parameters()
        assert_true((p[0].weights.astype('float32') == weights.astype('float32')).all())
        assert_true((p[0].biases.astype('float32') == biases.astype('float32')).all())
    def test_GetLayerParams(self):
        nn = MLPR(layers=[L("Linear")], n_iter=1)
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn._initialize(a_in, a_out)

        p = nn.get_parameters()
        assert_equals(type(p), list)
        assert_true(isinstance(p[0], tuple))

        assert_equals(p[0].layer, 'output')
        assert_equals(p[0].weights.shape, (16, 4))
        assert_equals(p[0].biases.shape, (4,))
    def test_LayerParamsSkipOneWithNone(self):
        nn = MLPR(layers=[L("Sigmoid", units=32), L("Linear", name='abcd')])
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn._initialize(a_in, a_out)

        weights = numpy.random.uniform(-1.0, +1.0, (32, 4))
        biases = numpy.random.uniform(-1.0, +1.0, (4,))
        # A None entry leaves the corresponding layer's parameters untouched.
        nn.set_parameters([None, (weights, biases)])

        p = nn.get_parameters()
        assert_true((p[1].weights == weights).all())
        assert_true((p[1].biases == biases).all())
Example #20
    def run_EqualityTest(self, copier, asserter):
        # Only PyLearn2 supports Maxout.
        extra = ["Maxout"] if sknn.backend.name == 'pylearn2' else []
        for activation in ["Rectifier", "Sigmoid", "Tanh", "ExpLin"] + extra:
            nn1 = MLPR(layers=[L(activation, units=16), L("Linear", units=1)], random_state=1234)
            nn1._initialize(self.a_in, self.a_out)

            nn2 = copier(nn1, activation)
            print('activation', activation)
            a_out1 = nn1.predict(self.a_in)
            a_out2 = nn2.predict(self.a_in)
            print(a_out1, a_out2)
            asserter(numpy.all(a_out1 - a_out2 < 1E-6))
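Any function taking (network, activation) and returning a comparable network can serve as the copier. A hypothetical copier that clones the network via an in-memory pickle round-trip, paired with assert_true since the clone should predict identically (the name clone_via_pickle is an assumption):

    def clone_via_pickle(nn, _activation):
        # Serialize and deserialize the network in memory, then let
        # run_EqualityTest compare its predictions against the original.
        buf = io.BytesIO()
        pickle.dump(nn, buf)
        buf.seek(0)
        return pickle.load(buf)

    self.run_EqualityTest(clone_via_pickle, assert_true)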
Example #24
    def test_GetParamsThenConstructor(self):
        nn1 = MLPR(layers=[L("Linear")], n_iter=1)
        a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
        nn1._initialize(a_in, a_out)

        p1 = nn1.get_parameters()
        print(len(p1))
        nn2 = MLPR(layers=[L("Linear")], n_iter=1, parameters=p1)
        nn2._initialize(a_in, a_out)
        p2 = nn2.get_parameters()
        print(len(p2))

        assert_true((p1[0].weights.astype('float32') == p2[0].weights.astype('float32')).all())
        assert_true((p1[0].biases.astype('float32') == p2[0].biases.astype('float32')).all())
    def ctor(_, activation):
        nn = MLPR(layers=[L(activation, units=16), L("Linear", units=1)], random_state=1234)
        nn._initialize(self.a_in, self.a_out)
        return nn
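A copier like this rebuilds the network from scratch with the same random_state, so when handed to run_EqualityTest the natural asserter is assert_true: identical seeds should yield identical predictions (the pairing shown is an assumption based on the helpers above):

    self.run_EqualityTest(ctor, assert_true)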