Example #1
    def __init__ (  self,
                    input,
                    input_shape,
                    id,
                    num_classes = 10,
                    rng = None,
                    input_params = None,
                    borrow = True,
                    # batch_norm = False,
                    activation = 'softmax',
                    verbose = 2
                    ):

        super(classifier_layer,self).__init__(id = id, type = 'classifier', verbose = verbose)

        if rng is None:
            rng = numpy.random

        if verbose >= 3:
            print("... Creating classifier layer")
        # W is a weight matrix of shape (input_shape[1], num_classes); it is initialized below.
        self.input = input
        # To copy weights previously created or some weird initializations
        if input_params is not None:
            self.w = input_params[0]
            self.b = input_params[1]
        else:
            self.w = theano.shared(value=numpy.asarray(0.01 * rng.standard_normal(
                                     size=(input_shape[1], num_classes)),
                                     dtype=theano.config.floatX), name='weights', borrow = borrow)
            self.b = theano.shared(value=numpy.zeros((num_classes,),
                                        dtype=theano.config.floatX), name='bias', borrow = borrow)

        self.fit = T.dot(input, self.w) + self.b
        self.p_y_given_x, softmax_shp = _activate(x = self.fit,
                                                  activation = activation,
                                                  input_size = num_classes,
                                                  verbose = verbose,
                                                  dimension = 2)

        # compute prediction as class whose probability is maximal in symbolic form
        self.inference = self.p_y_given_x # this is where batchnorm test should go.
        self.predictions = T.argmax(self.inference, axis=1)

        # parameters of the model
        self.L1 = abs(self.w).sum()
        self.L2 = (self.w ** 2).sum()
        self.params = [self.w, self.b]
        self.active_params = [self.w, self.b]
        self.probabilities = T.log(self.p_y_given_x)
        self.output = self.p_y_given_x
        self.output_shape = (input_shape[0], num_classes)
        self.num_neurons = num_classes
        self.activation = activation
        self.dropout_rate = 0
        self.batch_norm = False

        if verbose >= 3:
            print("... Classifier layer is created with output shape " + str(self.output_shape))
Example #2
    def test10_abstract_layer_activate_maxout_tuple(self, mock_maxout):
        # Mocked maxout returns the input and its shape unchanged; _activate should pass them through.
        mock_maxout.return_value = (self.input_ndarray, self.input_shape)
        self.out, self.out_shape = _activate(self.input_ndarray, ("maxout", "type", self.input_shape),
                                             self.input_shape, self.verbose, dimension = (10, 10))
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))
Example #3
    def test9_abstract_layer_activate_squared(self, mock_squared):
        mock_squared.return_value = self.input_ndarray
        self.out, self.out_shape = _activate(self.input_ndarray, "squared", self.input_shape, self.verbose)
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))
Example #4
    def test8_abstract_layer_activate_softmax(self, mock_softmax):
        mock_softmax.return_value = self.input_ndarray
        self.out, self.out_shape = _activate(self.input_ndarray, "softmax", self.input_shape, self.verbose)
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))
Example #5
    def test7_abstract_layer_activate_tanh(self, mock_tanh):
        mock_tanh.return_value = self.input_ndarray
        self.out, self.out_shape = _activate(self.input_ndarray, "tanh", self.input_shape, self.verbose)
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))
Example #6
    def test5_abstract_layer_activate_abs(self, mock_abs):
        mock_abs.return_value = self.input_ndarray
        self.out, self.out_shape = _activate(self.input_ndarray, "abs", self.input_shape, self.verbose)
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))
Example #7
    def test4_abstract_layer_activate_relu(self, mock_relu):
        mock_relu.return_value = self.input_ndarray
        self.out, self.out_shape = _activate(self.input_ndarray, "relu", self.input_shape, self.verbose)
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))
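
The test methods above each receive a mock_* argument, which implies they are methods of a unittest.TestCase whose methods are decorated with unittest.mock.patch; the decorators and the setUp fixture are not shown in the snippets. Below is a self-contained sketch of that pattern. The stand-in relu helper and the simplified _activate dispatcher are assumptions used only to make the sketch runnable; they are not the project's real implementations or patch targets.

# Self-contained sketch of the test pattern implied above: patch the concrete
# activation helper, then check that _activate passes the input straight through.
# The helper names and the dispatcher here are stand-ins (assumptions).
import unittest
from unittest import mock
import numpy

def relu(x):
    # Stand-in activation helper; the real project provides its own.
    return numpy.maximum(x, 0)

def _activate(x, activation, input_size, verbose, **kwargs):
    # Simplified dispatcher mirroring the call signature used in the tests above
    # (input, activation spec, input shape, verbosity, optional keyword args).
    if activation == 'relu':
        return relu(x), input_size
    raise NotImplementedError(activation)

class TestActivate(unittest.TestCase):

    def setUp(self):
        self.verbose = 3
        self.input_shape = (10, 10)
        self.input_ndarray = numpy.random.rand(*self.input_shape)

    @mock.patch(__name__ + '.relu')
    def test_activate_relu(self, mock_relu):
        # With relu mocked to return the input unchanged, _activate should pass
        # it straight through; this is the same assertion the examples above use.
        mock_relu.return_value = self.input_ndarray
        self.out, self.out_shape = _activate(self.input_ndarray, "relu",
                                             self.input_shape, self.verbose)
        self.assertTrue(numpy.allclose(self.out, self.input_ndarray))

if __name__ == '__main__':
    unittest.main()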