Example #1
 def _init_acummulators(self):
     """
     Initialize the accumulators used for optimization
     :return:
     """
     self.step_w = []
     self.step_b = []
     for layer in self.model.list_layers:
         shape_w = layer.get_weights().shape
         shape_b = layer.get_bias().shape
         self.step_w.append(LocalNeurons(np.zeros(shape_w), shape_w))
         self.step_b.append(LocalNeurons(np.zeros(shape_b), shape_b))
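For context, a minimal sketch of how such per-layer accumulators are typically consumed in a momentum-style update. This is an assumption for illustration: `_momentum_update`, `layer.update`, and the `momentum` coefficient are hypothetical, and LocalNeurons is assumed to support elementwise arithmetic (as the operator tests below suggest):

    def _momentum_update(self, grads_w, grads_b, lr=0.01, momentum=0.9):
        # step <- momentum * step - lr * gradient, accumulated per layer
        for i, layer in enumerate(self.model.list_layers):
            self.step_w[i] = self.step_w[i] * momentum - grads_w[i] * lr
            self.step_b[i] = self.step_b[i] * momentum - grads_b[i] * lr
            layer.update(self.step_w[i], self.step_b[i])  # hypothetical helper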
Example #2
    def __init__(self, n_in, n_out, activation='ReLU', distributed=False, w=None, b=None, rng=None):
        self.n_out = n_out
        self.n_in = n_in
        self.activation = act.fun_activation[activation]
        self.activation_d = act.fun_activation_d[activation]
        distributed = False  # TODO: complete this functionality

        if rng is None:
            rng = np.random.RandomState(123)
        self.rng = rng
        self.rnd_state = self.rng.get_state()

        self.shape_w = n_out, n_in
        self.shape_b = n_out, 1

        # Recommendations from http://cs231n.github.io/neural-networks-2/ and http://deeplearning.net/tutorial/mlp.html#mlp
        # TODO: decide whether weight initialization belongs here or in core.neurons (in terms of readability)
        if w is None:
            if activation == "Tanh":
                w = np.asarray(
                    self.rng.uniform(
                        low=-np.sqrt(6.0 / (n_in + n_out)),
                        high=+np.sqrt(6.0 / (n_in + n_out)),
                        size=self.shape_w),
                    dtype=np.dtype(float)
                )
            elif activation == "Sigmoid":
                w = np.asarray(
                    self.rng.uniform(
                        low=-np.sqrt(6.0 / (n_in + n_out))*4.0,
                        high=+np.sqrt(6.0 / (n_in + n_out))*4.0,
                        size=self.shape_w),
                    dtype=np.dtype(float)
                )
            else:
                w = self.rng.randn(*self.shape_w) * np.sqrt(2.0/n_in)

        if b is None:
            b = np.zeros(self.shape_b, dtype=np.dtype(float))

        # TODO: weights_T was meant for distributed operations, but the DistributedNeurons class is left as TBC
        assert distributed is False, "DistributedNeurons will be implemented soon ..."
        self.weights = LocalNeurons(w, self.shape_w)
        #self.weights_T = LocalNeurons(w.transpose(), self.shape_w[::-1])
        self.bias = LocalNeurons(b, self.shape_b)
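As a self-contained illustration of the heuristics above (Glorot/Xavier uniform bounds for Tanh, the same bounds scaled by 4 for Sigmoid as recommended in the deeplearning.net MLP tutorial, and He initialization for ReLU-like activations), a minimal NumPy sketch; `init_weights` is a hypothetical helper, not part of the class:

    import numpy as np

    def init_weights(n_in, n_out, activation, rng=None):
        """Sketch of the initialization heuristics used above."""
        rng = rng or np.random.RandomState(123)
        if activation == "Tanh":
            # Glorot/Xavier uniform: U(-sqrt(6/(n_in+n_out)), +sqrt(6/(n_in+n_out)))
            limit = np.sqrt(6.0 / (n_in + n_out))
            return rng.uniform(-limit, limit, size=(n_out, n_in))
        elif activation == "Sigmoid":
            # Same bounds scaled by 4, per the deeplearning.net MLP tutorial
            limit = 4.0 * np.sqrt(6.0 / (n_in + n_out))
            return rng.uniform(-limit, limit, size=(n_out, n_in))
        # He initialization for ReLU-like activations
        return rng.randn(n_out, n_in) * np.sqrt(2.0 / n_in)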
Example #3
 def test_operators(self):
     logger.info("Testeando operadores...")
     other = np.ones(self.matrix_neurons.shape)
     # Return: LocalNeurons
     res = self.matrix_neurons * other
     assert np.array_equiv(res.matrix, np.ones(self.matrix_neurons.shape))
     res = self.matrix_neurons / other
     assert np.array_equiv(res.matrix, np.ones(self.matrix_neurons.shape))
     res = self.matrix_neurons - other
     assert np.array_equiv(res.matrix, np.zeros(self.matrix_neurons.shape))
     res = self.matrix_neurons + other
     assert np.array_equiv(res.matrix,
                           np.ones(self.matrix_neurons.shape) * 2)
     res **= 2  # LocalNeurons
     assert np.array_equiv(res.matrix,
                           np.ones(self.matrix_neurons.shape) * 4)
     assert self.matrix_neurons == LocalNeurons(self.matrix_neurons.matrix,
                                                self.matrix_neurons.shape)
     logger.info("OK")
Example #4
 def test_activation(self):
     logger.info("Testeando la integración de funciones de activación...")
     activation = 'Tanh'
     fun = fun_activation[activation]
     res = self.matrix_neurons.activation(fun)
     fun_d = fun_activation_d[activation]
     res_d = self.matrix_neurons.activation(fun_d)
     assert np.array_equiv(res.matrix,
                           fun(np.ones(self.matrix_neurons.shape)))
     assert np.array_equiv(res_d.matrix,
                           fun_d(np.ones(self.matrix_neurons.shape)))
     # Test softmax as an activation function
     N = self.matrix_neurons.rows
     shape = (N, )
     x = LocalNeurons(sc.parallelize(range(N)),
                      shape)  # also exercises the RDD code path
     res = x.softmax()
     res = list(map(lambda e: e[0], res.matrix))  # list() for Python 3 compatibility
     exp_x = np.exp(range(N))
     y = exp_x / float(sum(exp_x))
     assert np.allclose(res, y)
     logger.info("OK")
Example #5
 def __init__(self, shape=(100, 100)):
     mat = np.ones(shape)
     self.matrix_neurons = LocalNeurons(mat, shape)
Example #6
    def test_all(self):
        # 1) Test assert_features_label

        # 1.a) No exception
        # Dummy data
        features = np.array(range(10))
        label = 1
        data = (features, label)
        try:
            self.test_features_label(data)
            test_ok = True
        except Exception:
            test_ok = False

        assert test_ok

        # 1.b) With exception
        try:
            self.test_features_label(features)
            test_ok = False  # Should raise an exception for receiving an array of dim 10
        except Exception:
            test_ok = True

        assert test_ok

        # 1.c) Must be a no-op for None
        try:
            self.test_features_label(None)
            test_ok = True
        except Exception:
            test_ok = False

        assert test_ok

        # 2) Test assert_samedimension

        # 2.a) No exception
        try:
            self.test_samedimension(features, features)
            test_ok = True
        except Exception:
            test_ok = False

        assert test_ok

        # 2.b) With exception
        try:
            self.test_samedimension(features,
                                    features[:5])  # Different dimensions
            test_ok = False
        except Exception:
            test_ok = True

        assert test_ok

        # 3) Test assert_matchdimension

        # Neuron matrices
        shape = (5, 10)
        matrix1 = LocalNeurons(np.zeros(shape), shape=shape)

        shape = (10, 3)
        matrix2 = LocalNeurons(np.zeros(shape), shape=shape)

        # 3.a) No exception
        try:
            self.test_matchdimension(matrix1, matrix2)
            test_ok = True
        except Exception:
            test_ok = False

        assert test_ok

        # 3.b) With exception
        try:
            self.test_matchdimension(matrix2,
                                     matrix1)  # Incompatible dimensions
            test_ok = False
        except Exception:
            test_ok = True

        assert test_ok

        # 4) Test assert_sametype

        try:
            self.test_sametype(features, features)
            test_ok = True
        except Exception:
            test_ok = False

        assert test_ok

        try:
            self.test_sametype(features, matrix2)
            test_ok = False
        except Exception:
            test_ok = True

        assert test_ok
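The repeated try/except/flag pattern above can be expressed more compactly with pytest.raises. A minimal sketch under assumptions: the project may not use pytest, and `checker` is a hypothetical stand-in for the object providing the assertion helpers:

    import numpy as np
    import pytest

    def test_assertions(checker):
        features = np.array(range(10))

        checker.test_features_label((features, 1))  # must not raise
        with pytest.raises(Exception):
            checker.test_features_label(features)   # malformed input must raise

        checker.test_samedimension(features, features)
        with pytest.raises(Exception):
            checker.test_samedimension(features, features[:5])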