Example No. 1
    def __init__(self, rng, hiddenLayerList, n_out):
        """Initialize the parameters for the multilayer perceptron.

        :type rng: numpy.random.RandomState
        :param rng: a random number generator used to initialize weights

        :type hiddenLayerList: [HiddenLayer instances]
        :param hiddenLayerList: a list of hidden layers, already connected
            to each other by the caller (each layer's input is the previous
            layer's output)

        :type n_out: int
        :param n_out: number of output units, the dimension of the space in
            which the labels lie
        """
        # Hidden layers arrive pre-wired, so we only need to keep the list.
        self.hiddenLayers = hiddenLayerList

        # The logistic regression (output) layer takes the last hidden
        # layer's output units as its input.
        self.logRegressionLayer = LogisticRegression(
            input=hiddenLayerList[-1].output,
            n_in=hiddenLayerList[-1].inOutDim[1],
            n_out=n_out)

        # Symbolic variables for the data.
        self.X = self.hiddenLayers[0].input  # training data
        self.y = T.bvector('y')              # labels for training data

        # L1 norm; one regularization option is to enforce the L1 norm to
        # be small.  `+=` on a symbolic tensor builds a new graph node, so
        # this loop does not mutate any layer's W.
        self.L1 = abs(self.logRegressionLayer.W).sum()
        for ly in self.hiddenLayers:
            self.L1 += abs(ly.W).sum()

        # Square of the L2 norm; one regularization option is to enforce
        # the square of the L2 norm to be small.
        self.L2_sqr = (self.logRegressionLayer.W ** 2).sum()
        for ly in self.hiddenLayers:
            self.L2_sqr += (ly.W ** 2).sum()

        # The negative log likelihood of the MLP is given by the negative
        # log likelihood of the output of the model, computed in the
        # logistic regression layer.
        self.negative_log_likelihood = self.logRegressionLayer.negative_log_likelihood
        # Same holds for the function computing the number of errors.
        self.errors = self.logRegressionLayer.errors

        # The parameters of the model are the parameters of all layers.
        # BUGFIX: copy the output layer's list first — `+=` on a list
        # extends it IN PLACE, so the original code silently appended every
        # hidden layer's params onto logRegressionLayer.params as well.
        self.params = list(self.logRegressionLayer.params)
        for ly in self.hiddenLayers:
            self.params += ly.params