def get_Params(self):
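        """Return the network parameters as two lists (weights, biases),
        ordered from the input layer to the output layer, with any batch
        normalization parameters (log_gamma, beta) re-inserted next to the
        weights and biases of the corresponding layers."""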

        # Warning: in GPU mode the parameter list is sometimes ordered from
        # the output layer back to the input layer; in that case it has to
        # be reversed.
        # To detect this, the parameters of the whole system are inspected:
        # if the first weight is a 2-D matrix (a fully connected layer, which
        # comes last in a convolutional network), the list is reversed below.
        x = T.tensor4()
        cost = self.apply(x).sum()
        cg = ComputationGraph(cost)
        W = VariableFilter(roles=[WEIGHT])(cg.variables)
        B = VariableFilter(roles=[BIAS])(cg.variables)
        # if the parameters have already been renamed by a previous call
        # (see the renaming loop at the end), they are already ordered
        if W[0].name.startswith("layer"):
            return W, B

        # find the batch normalization parameters and remove them from the lists
        gamma = []
        beta = []
        index_gamma = []
        index_beta = []
        for i, (w, b) in enumerate(zip(W, B)):

            if w.name == "log_gamma":
                index_gamma.append(i)
                gamma.append(w)
            if b.name == "beta":
                index_beta.append(i)
                beta.append(b)

        # pop from the highest index first so the remaining indices stay valid
        for i in index_gamma[::-1]:
            W.pop(i)
        for i in index_beta[::-1]:
            B.pop(i)

        if len(W) == 0:
            raise ValueError("no weight parameter left after filtering the graph")
        # if the first weight is a matrix the list starts at the output layer:
        # reverse so the parameters run from input to output
        if W[0].ndim == 2:
            W = W[::-1]
        if B[0].ndim == 1:
            B = B[::-1]

        # if batch normalization is used, the gamma and beta parameters are
        # re-inserted after each layer's weight and bias so that the lists
        # match the layout expected by the dropout protocol
        if len(gamma) != len(beta):
            raise Exception(
                "gamma and beta parameters should be balanced: (%d, %d)"
                % (len(gamma), len(beta)))

        if len(gamma) != 0:
            # gamma and beta may have been collected in opposite orders;
            # align them by comparing the shapes of the first elements
            if tuple(beta[0].shape.eval()) != tuple(gamma[0].shape.eval()):
                beta.reverse()
            W_new = []
            B_new = []
            for w, g in zip(W[:len(gamma)], gamma):
                W_new.append(w)
                W_new.append(g)
            W_new += W[len(gamma):]
            for b, b_ in zip(B[:len(gamma)], beta):
                B_new.append(b)
                B_new.append(b_)
            B_new += B[len(gamma):]
            W = W_new
            B = B_new

        # rename the parameters layer by layer; the early return above relies
        # on this naming to detect that the lists are already ordered
        for index, (w, b) in enumerate(zip(W, B)):
            w.name = "layer_" + str(index) + "_W"
            b.name = "layer_" + str(index) + "_B"

        return W, B
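
# Usage sketch (hypothetical names): assuming `model` is an instance of this
# class whose `apply` method chains Blocks bricks, and that the module-level
# imports provide theano.tensor as T plus ComputationGraph, VariableFilter and
# the WEIGHT / BIAS roles from blocks (as used above), the returned lists pair
# up layer by layer:
#
#     W, B = model.get_Params()
#     for w, b in zip(W, B):
#         print(w.name, b.name)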