Example #1
    def test_TrainConstantOneEpoch(self):
        for t in ['csr_matrix', 'csc_matrix']:
            sparse_matrix = getattr(scipy.sparse, t)
            X_s, y_s = sparse_matrix((8, 16), dtype=numpy.float32), sparse_matrix((8, 16), dtype=numpy.float32)
            X, y = X_s.toarray(), y_s.toarray()
            
            nn1 = MLP(layers=[L("Linear")], n_iter=1, random_state=1234)
            nn1._fit(X, y)

            nn2 = MLP(layers=[L("Linear")], n_iter=1, random_state=1234)
            nn2._fit(X_s, y_s)

            # Training on dense and sparse inputs with the same seed should
            # yield identical predictions.
            assert_true(numpy.all(nn1._predict(X_s) == nn2._predict(X_s)))
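These snippets reference MLP, L, and C without showing the surrounding file header. They appear to come from the scikit-neuralnetwork (sknn) test suite, so the following imports are a plausible reconstruction; the alias names are an assumption inferred from usage, not taken from the original source.

# Assumed imports for the test snippets above and below (hypothetical aliases).
import random
import tempfile
import numpy
import scipy.sparse
from nose.tools import assert_true, assert_equals, assert_raises
from sknn.mlp import Regressor as MLP, Layer as L, Convolution as C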
Example #2
    def test_TrainRandomOneEpoch(self):
        for t in ['dok_matrix', 'lil_matrix']:
            sparse_matrix = getattr(scipy.sparse, t)
            X_s, y_s = sparse_matrix((8, 16), dtype=numpy.float32), sparse_matrix((8, 16), dtype=numpy.float32)
            for i in range(X_s.shape[0]):
                X_s[i,random.randint(0, X_s.shape[1]-1)] = 1.0
                y_s[i,random.randint(0, y_s.shape[1]-1)] = 1.0
            X, y = X_s.toarray(), y_s.toarray()

            nn1 = MLP(layers=[L("Linear")], n_iter=1, random_state=1234)
            nn1._fit(X, y)

            nn2 = MLP(layers=[L("Linear")], n_iter=1, random_state=1234)
            nn2._fit(X_s, y_s)

            # Compare the network trained on dense arrays against the one
            # trained on the sparse matrices.
            assert_true(numpy.all(nn1._predict(X_s) == nn2._predict(X_s)))
Example #3
 def setUp(self):
     self.batch_count = 0
     self.batch_items = 0
     self.nn = MLP(
                 layers=[L("Rectifier")],
                 learning_rate=0.001, n_iter=1,
                 callback={'on_batch_start': self.on_batch_start})
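The on_batch_start method registered above is not shown in this snippet. A minimal sketch, assuming it only counts invocations (no assumptions are made about the keyword arguments sknn passes to batch callbacks):

 def on_batch_start(self, **args):
     # Hypothetical counterpart to the callback wired up in setUp above;
     # it only counts how many batches were started.
     self.batch_count += 1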
Example #4
 def test_EpochCallback(self):
     a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
     nn = MLP(layers=[L("Linear")],
              n_iter=4,
              callback={'on_epoch_start': self._callback})
     nn._fit(a_in, a_out)
     assert_equals(len(self.data), 4)
Example #5
    def test_FitTerminateStable(self):
        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
        self.nn = MLP(
                    layers=[L("Gaussian")], learning_rate=0.001,
                    n_iter=None, n_stable=1, f_stable=0.1,
                    valid_set=(a_in, a_out))

        self.nn._fit(a_in, a_out)
Example #6
    def test_FitAutomaticValidation(self):
        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
        self.nn = MLP(
                    layers=[L("Linear")], learning_rate=0.001,
                    n_iter=10, n_stable=1, f_stable=0.1,
                    valid_size=0.25)

        self.nn._fit(a_in, a_out)
Example #7
    def test_FitTerminateStable(self):
        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
        activation = "Gaussian" if sknn.backend.name == "pylearn2" else "Linear"
        self.nn = MLP(
                    layers=[L(activation)], learning_rate=0.001,
                    n_iter=None, n_stable=1, f_stable=0.01,
                    valid_set=(a_in, a_out))

        self.nn._fit(a_in, a_out)
Example #8
 def test_BatchCallbacks(self):
     a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
     nn = MLP(layers=[L("Linear")],
              n_iter=1,
              batch_size=4,
              callback=self._callback)
     nn._fit(a_in, a_out)
     assert_equals(len(self.data['on_batch_start']), 2)
     assert_equals(len(self.data['on_batch_finish']), 2)
Example #9
    def test_TrainingUserDefined(self):
        self.counter = 0
        
        def terminate(**_):
            # Returning False from the 'on_epoch_finish' callback stops training,
            # so this should run exactly once despite n_iter=100.
            self.counter += 1
            return False

        a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
        self.nn = MLP(layers=[L("Linear")], n_iter=100, n_stable=None, callback={'on_epoch_finish': terminate})
        self.nn._fit(a_in, a_out)
        
        assert_equals(self.counter, 1)
Example #10
 def test_TrainingCallbacks(self):
     a_in, a_out = numpy.zeros((8, 16)), numpy.zeros((8, 4))
     nn = MLP(layers=[L("Linear")], n_iter=4, callback=self._callback)
     nn._fit(a_in, a_out)
     assert_equals(len(self.data['on_train_start']), 1)
     assert_equals(len(self.data['on_train_finish']), 1)
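Examples #8 and #10 above rely on a _callback helper and a self.data store that are not shown. A plausible fixture, assuming sknn calls a plain-callable callback with the event name followed by keyword variables (an assumption; the original setUp is not part of these snippets):

 def setUp(self):
     # Hypothetical fixture: one list of recorded callback payloads per event name.
     self.data = {}

 def _callback(self, event, **variables):
     # Record every event so the tests can assert how often each one fired.
     self.data.setdefault(event, []).append(variables)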
Example #11
 def setUp(self):
     self.nn = MLP(layers=[L("Linear", units=3)], n_iter=1)
     self.directory = tempfile.mkdtemp()
Example #12
 def test_TrainingInfinite(self):
     a_in, a_out = numpy.zeros((8,16)), numpy.zeros((8,4))
     # Neither an iteration limit nor a stability criterion is given, so there is
     # no termination condition and fitting is expected to raise an AssertionError.
     self.nn = MLP(layers=[L("Linear")], n_iter=None, n_stable=None)
     assert_raises(AssertionError, self.nn._fit, a_in, a_out)
Example #13
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.linear_model import LogisticRegression
# Assumed import: MLP and Layer appear to come from scikit-neuralnetwork (sknn.mlp).
from sknn.mlp import Classifier as MLP, Layer
#########################################
scaler1 = StandardScaler()                        # used in the pipeline below
scaler = MinMaxScaler(feature_range=(0.0, 1.0))   # alternative scaler, not used below
val_percent = 0.1
nn = MLP(
    layers=[
        Layer("Rectifier", units=100),
        Layer("Tanh", units=100),
        Layer("Softmax")
    ],
    learning_rule='momentum',
    learning_rate=0.005,
    batch_size=30,
    dropout_rate=0.1,
    n_iter=100,
    verbose=1,
    valid_size=val_percent,
    n_stable=30,
    debug=True,
    #    regularize = 'L2'
)

#nn = MLP(layers=[
#        Layer("Rectifier", units=100, pieces=2),
#        Layer("Softmax")],
#    learning_rate=0.001,    n_iter=25)

clf = Pipeline([("scaler", scaler1), ('neural network', nn)])
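A minimal usage sketch for the pipeline above. The training arrays are hypothetical stand-ins, and it assumes MLP behaves as an sklearn-compatible classifier (integer class labels in, predicted labels out):

import numpy

# Hypothetical data: 1000 samples, 20 features, 3 classes.
X_train = numpy.random.uniform(size=(1000, 20)).astype(numpy.float32)
y_train = numpy.random.randint(0, 3, size=(1000,))

clf.fit(X_train, y_train)         # StandardScaler fit+transform, then network training
predicted = clf.predict(X_train)  # predicted class labels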
Example #14
 def setUp(self):
     self.nn = MLP(layers=[L("Gaussian", units=4)], n_iter=1)
Example #15
 def setUp(self):
     self.nn = MLP(layers=[
         C("Rectifier", kernel_shape=(3, 3), channels=4),
         L("Linear")],
         n_iter=1)
Example #16
 def setUp(self):
     self.nn = MLP(layers=[L("Linear", units=4)], n_iter=1)
Example #17
 def setUp(self):
     self.nn = MLP(layers=[C("Rectifier", channels=4), L("Linear")], n_iter=1)