Example #1
    def __init__(self, activator_type="linear",
                 input_dim=None, output_dim=None,
                 W=None, b=None):

        # `W` is initialized with `W_values`, which is uniformly sampled
        # from [-sqrt(6./(n_in+n_hidden)), sqrt(6./(n_in+n_hidden))]
        # for the tanh activation function.
        # The output of `uniform` is converted using `asarray` to dtype
        # theano.config.floatX so that the code is runnable on GPU.
        # Note : optimal initialization of weights is dependent on the
        #        activation function used (among other things).
        #        For example, results presented in [Xavier10] suggest that you
        #        should use 4 times larger initial weights for sigmoid
        #        compared to tanh.
        #        We have no information for other functions, so we use the
        #        same scheme as for tanh.

        assert activator_type is not None, "Activation must be provided"
        self.activator_type = activator_type
        self.activator = get_activation(self.activator_type)

        if input_dim is not None and output_dim is not None:

            if W is None:
                rng = get_numpy_rng()
                W = numpy.asarray(rng.uniform(
                        low=-numpy.sqrt(6. / (input_dim + output_dim)),
                        high=numpy.sqrt(6. / (input_dim + output_dim)),
                        size=(input_dim, output_dim)), dtype=theano.config.floatX)
                if self.activator == theano.tensor.nnet.sigmoid:
                    W *= 4
            else:
                assert input_dim == W.shape[0] and output_dim == W.shape[1]

            if b is None:
                b = numpy.zeros((output_dim,), dtype=theano.config.floatX)
            else:
                assert output_dim == b.shape[0]

            self.W = theano.shared(value=W, borrow=True)
            self.b = theano.shared(value=b, borrow=True)
            self.input_dim_, self.output_dim_ = W.shape
        elif W is not None and b is not None:
            self.W = theano.shared(value=W, borrow=True)
            self.b = theano.shared(value=b, borrow=True)
            self.input_dim_, self.output_dim_ = W.shape

        else:
            raise Exception("Perception Layer needs parameter "
                            "in pair of (W,b) or (n_in, n_out) besides activation")
Example #2
    def __init__(self,
                 activator_type="linear",
                 tensor_shape=None,
                 init_W=None,
                 init_b=None):

        assert activator_type is not None, "Activation must be provided"
        self.activator_type = activator_type
        self.activator = get_activation(self.activator_type)

        if init_W is not None and tensor_shape is not None:
            assert init_W.shape == tensor_shape, "init_W shape does not match the given tensor_shape"

        # Input: a 4D tensor corresponding to a mini-batch of input images with shape:
        #        [mini-batch size, number of input feature maps, image height, image width].
        # Weight: a 4D tensor with shape:
        #        [number of feature maps at layer m, number of feature maps at layer m-1, filter height, filter width]
        rng = get_numpy_rng()
        if init_W is None and tensor_shape is None:
            raise Exception("neither W now tensor shape is provided.")
        elif init_W is not None:
            self.W = theano.shared(init_W.astype(theano.config.floatX), borrow=True)
            self.tensor_shape = init_W.shape
        elif init_W is None:
            self.tensor_shape = tensor_shape
            (output_feature_map_num, input_feature_map_num, filter_height, filter_width) = tensor_shape
            # fan-in of each output unit: input feature maps * filter height * filter width
            w_bound = np.sqrt(input_feature_map_num * filter_height * filter_width)
            init_W = rng.uniform(low=-1.0 / w_bound, high=1.0 / w_bound, size=tensor_shape)

            self.W = theano.shared(init_W.astype(theano.config.floatX),  borrow=True)

        if init_b is None and tensor_shape is None:
            raise Exception("neither b now tensor shape is provided.")
        elif init_b is not None:
            self.b = theano.shared(init_b.astype(theano.config.floatX),  borrow=True)
        elif init_b is None:
            (output_feature_map_num, input_feature_map_num, filter_height, filter_width) = tensor_shape
            b_shape = (output_feature_map_num,)
            init_b = rng.uniform(low=-.5, high=.5, size=b_shape)

            self.b = theano.shared(init_b.astype(theano.config.floatX),  borrow=True)
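
Before the next example, here is a minimal sketch of how weights with this 4D layout feed into a Theano convolution, illustrating the shape convention from the comments above (the concrete sizes are illustrative; the conv2d import location assumes Theano >= 0.8):

import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv2d

# input: (mini-batch size, input feature maps, image height, image width)
x = T.tensor4('x')

# weights: (output feature maps, input feature maps, filter height, filter width)
w_shape = (2, 3, 5, 5)
w_bound = np.sqrt(np.prod(w_shape[1:]))      # fan-in = 3 * 5 * 5
rng = np.random.RandomState(1234)
W = theano.shared(rng.uniform(-1.0 / w_bound, 1.0 / w_bound,
                              size=w_shape).astype(theano.config.floatX),
                  borrow=True)

f = theano.function([x], conv2d(x, W))
out = f(np.zeros((1, 3, 28, 28), dtype=theano.config.floatX))
print(out.shape)  # (1, 2, 24, 24) with the default 'valid' border mode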
Example #3
    def __init__(self, name, class_num, trans_mat_prior=None):
        """ Initialize the parameters of the logistic regression

        :type input: theano.tensor.TensorType
        :param input: symbolic variable that describes the input of the
                      architecture (one minibatch)

        :type n_in: int
        :param n_in: number of input units, the dimension of the space in
                     which the datapoints lie

        :type n_out: int
        :param n_out: number of output units, the dimension of the space in
                      which the labels lie

        """

        assert isinstance(name, str) and len(name) > 0
        self.name = name
        # transition matrix of class tags:
        # A_{i,j} is the transition prob from class i to class j;
        # A_{0,i} is the prob of starting with class i
        if trans_mat_prior is None:
            self.tag_trans_matrix = theano.shared(value=np.zeros((class_num + 1, class_num), dtype=theano.config.floatX),
                                                  name='path_layer_trans_%s' % self.name, borrow=True)
        else:
            self.tag_trans_matrix = theano.shared(value=np.asarray(trans_mat_prior, dtype=theano.config.floatX),
                                                  name='path_layer_trans_%s' % self.name, borrow=True)
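
To make the (class_num + 1, class_num) layout concrete, here is a small sketch of building a trans_mat_prior for a hypothetical BIO tag set (the tag set, the forbidden transitions, and the penalty value are all assumptions for illustration):

import numpy as np

tags = ['B', 'I', 'O']                 # hypothetical tag set, class_num = 3
class_num = len(tags)
NEG = -1000.0                          # large negative score ~ "forbidden"

# row 0 holds the start scores A_{0,i}; row 1 + i holds transitions out of tag i
prior = np.zeros((class_num + 1, class_num), dtype='float32')
prior[0, tags.index('I')] = NEG                     # a sequence cannot start with I
prior[1 + tags.index('O'), tags.index('I')] = NEG   # O -> I is invalid

# prior can then be passed to the layer above as trans_mat_prior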
Example #4
    def __init__(self, name,
                 n_in=None, n_out=None,
                 W=None, b=None):
        """ Initialize the parameters of the softmax (logistic regression) layer.

        :type name: str
        :param name: name used to label the layer's shared variables

        :type n_in: int
        :param n_in: number of input units, the dimension of the space in
                     which the datapoints lie

        :type n_out: int
        :param n_out: number of output units, the dimension of the space in
                      which the labels lie

        """
        self.name = name
        param_valid = (W is not None and b is not None) or (n_in is not None and n_out is not None)
        assert param_valid, "The construction parameters are not valid"

        if W is not None:
            self.W = theano.shared(value=W.astype(theano.config.floatX),
                                   name='softmax_W_%s' % self.name, borrow=True)
        elif n_in is not None and n_out is not None:
            # initialize with 0 the weights W as a matrix of shape (n_in, n_out)
            rng = get_numpy_rng()
            W_values = numpy.asarray(rng.uniform(
                    low=-numpy.sqrt(6. / (n_in + n_out)),
                    high=numpy.sqrt(6. / (n_in + n_out)),
                    size=(n_in, n_out)), dtype=theano.config.floatX)
            self.W = theano.shared(value=W_values,
                                    name='softmax_W_%s' % (self.name), borrow=True)

        if b is not None:
            self.b = theano.shared(value=b.astype(theano.config.floatX),
                                   name='softmax_b_%s' % self.name, borrow=True)
        elif n_in is not None and n_out is not None:
            # initialize the biases b as a vector of n_out zeros
            self.b = theano.shared(value=numpy.zeros((n_out,),
                                                     dtype=theano.config.floatX),
                                   name='softmax_b_%s' % self.name, borrow=True)
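
Finally, a minimal standalone sketch of what these shared variables typically feed into: the usual Theano softmax forward pass (the forward pass itself is not part of the snippet above, and the _demo names are illustrative):

import numpy
import theano
import theano.tensor as T

n_in, n_out = 4, 3
rng = numpy.random.RandomState(1234)
W = theano.shared(numpy.asarray(rng.uniform(
        low=-numpy.sqrt(6. / (n_in + n_out)),
        high=numpy.sqrt(6. / (n_in + n_out)),
        size=(n_in, n_out)), dtype=theano.config.floatX),
    name='softmax_W_demo', borrow=True)
b = theano.shared(numpy.zeros((n_out,), dtype=theano.config.floatX),
                  name='softmax_b_demo', borrow=True)

x = T.matrix('x')
p_y_given_x = T.nnet.softmax(T.dot(x, W) + b)  # class probabilities
f = theano.function([x], p_y_given_x)
print(f(numpy.ones((2, n_in), dtype=theano.config.floatX)))  # rows sum to 1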