Example #1
0
File: mlp.py — Project: anirudh9119/vae
 def __init__(self, x, n_in, n_hid, n_out, nlayers=1, y=None, eps=None):
     """Gaussian MLP: a stack of hidden layers with parallel mu / log-variance heads.

     x       -- symbolic input minibatch.
     n_in    -- input dimensionality.
     n_hid   -- hidden layer width.
     n_out   -- output dimensionality (size of mu / logvar).
     nlayers -- number of hidden layers.
     y       -- target variable; supply to use this MLP as a decoder.
     eps     -- noise sample; supply to use this MLP as an encoder.
     Exactly one of y / eps may be given.
     """
     super(GaussianMLP, self).__init__(x, n_in, n_hid, nlayers=nlayers, prefix='GaussianMLP_hidden')
     # Affine (activation=None) head producing the mean.
     self.mu_layer = HiddenLayer(
         input=self.hidden_layers[-1].output,
         n_in=self.hidden_layers[-1].n_out,
         n_out=n_out,
         activation=None,
         prefix='GaussianMLP_mu'
     )
     # Affine head producing log(sigma^2).
     self.logvar_layer = HiddenLayer(
         input=self.hidden_layers[-1].output,
         n_in=self.hidden_layers[-1].n_out,
         n_out=n_out,
         activation=None,
         prefix='GaussianMLP_logvar'
     )
     self.mu = self.mu_layer.output
     self.var = T.exp(self.logvar_layer.output)
     self.sigma = T.sqrt(self.var)
     self.params = (self.params + self.mu_layer.params +
                    self.logvar_layer.params)
     # Compare against None explicitly: eps/y are symbolic Theano variables,
     # and truth-testing a symbolic variable is fragile (raises in Theano).
     if eps is not None:
         # for use as encoder: reparametrization trick z = mu + sigma * eps
         assert y is None
         self.out = self.mu + self.sigma * eps
     if y is not None:
         # for use as decoder
         assert eps is None
         # XXX specific to [0, 1] outputs
         self.out = T.nnet.sigmoid(self.mu)
         self.cost = -T.sum(log_diag_mvn(self.out, self.var)(y))
Example #2
0
 def __init__(self, x, n_in, n_hid, n_out, nlayers=1, y=None, eps=None):
     """Gaussian MLP: hidden MLP followed by parallel mu / log-variance heads.

     y (target) selects decoder mode; eps (noise sample) selects encoder
     mode. Exactly one of the two may be supplied.
     """
     super(GaussianMLP, self).__init__(x,
                                       n_in,
                                       n_hid,
                                       nlayers=nlayers,
                                       prefix='GaussianMLP_hidden')
     # Affine head for the mean (no activation).
     self.mu_layer = HiddenLayer(input=self.hidden_layers[-1].output,
                                 n_in=self.hidden_layers[-1].n_out,
                                 n_out=n_out,
                                 activation=None,
                                 prefix='GaussianMLP_mu')
     # Affine head for log(sigma^2).
     self.logvar_layer = HiddenLayer(input=self.hidden_layers[-1].output,
                                     n_in=self.hidden_layers[-1].n_out,
                                     n_out=n_out,
                                     activation=None,
                                     prefix='GaussianMLP_logvar')
     self.mu = self.mu_layer.output
     self.var = T.exp(self.logvar_layer.output)
     self.sigma = T.sqrt(self.var)
     self.params = (self.params + self.mu_layer.params +
                    self.logvar_layer.params)
     # Identity checks against None: eps/y are symbolic Theano variables and
     # must not be truth-tested directly.
     if eps is not None:
         # for use as encoder: reparametrization z = mu + sigma * eps
         assert y is None
         self.out = self.mu + self.sigma * eps
     if y is not None:
         # for use as decoder
         assert eps is None
         # XXX specific to [0, 1] outputs
         self.out = T.nnet.sigmoid(self.mu)
         self.cost = -T.sum(log_diag_mvn(self.out, self.var)(y))
Example #3
0
    def __init__(self,
                 x,
                 n_in,
                 n_hid,
                 n_out,
                 nlayers=1,
                 activation=None,
                 y=None,
                 eps=None,
                 COV=False):
        """Gaussian MLP with optional rank-1 covariance correction (icml14 SBP).

        Builds a hidden MLP, then mu / log-variance heads; when COV is True an
        extra head u parameterizes a rank-1 covariance factor. y (target)
        selects decoder mode; eps (noise sample) selects encoder mode.
        Exactly one of y / eps may be supplied.
        """
        super(GaussianMLP, self).__init__(x,
                                          n_in,
                                          n_hid,
                                          nlayers=nlayers,
                                          prefix='GaussianMLP_hidden')

        # mu & logvar are affine maps of the last hidden layer when encoding.
        self.mu_layer = HiddenLayer(
            input=self.hidden_layers[-1].output,
            n_in=self.hidden_layers[-1].n_out,
            n_out=n_out,
            # None or T.nnet.softplus; little difference if logvar>0 (freyfaces)
            activation=activation,
            prefix='GaussianMLP_mu')
        # The head emits log(sigma^2), not sigma (logvar = 2*log(sigma)).
        self.logvar_layer = HiddenLayer(
            input=self.hidden_layers[-1].output,
            n_in=self.hidden_layers[-1].n_out,
            n_out=n_out,
            # None, ReLU or sigmoid; keeps logvar>0 for freyfaces
            activation=activation,
            prefix='GaussianMLP_logvar')
        self.mu = self.mu_layer.output
        self.var = T.exp(self.logvar_layer.output)
        self.params = self.params + self.mu_layer.params + self.logvar_layer.params

        def SampleKsi(d, u, mu, eps):  # icml14 SBP eq. (20)
            # R = D^{-1/2} - coeff * D^{-1} u u^T D^{-1/2}; sample = mu + R.eps
            dn = 1.0 / d
            uDnu = T.sum(u * u * dn)
            # SMALLNUM guards against division by zero when u -> 0.
            coeff = (1 - 1.0 / T.sqrt(1.0 + uDnu)) / (uDnu + SMALLNUM)
            u = u.reshape((u.shape[0], 1))
            R = T.diag(T.sqrt(dn)) - coeff * T.dot(
                T.dot(T.diag(dn), T.dot(u, u.T)), T.diag(T.sqrt(dn)))
            return mu + T.dot(R, eps)

        if not COV:
            # Diagonal covariance.
            self.sigma = T.sqrt(self.var)
            # Compare against None explicitly: eps/y are symbolic Theano
            # variables and must not be truth-tested directly.
            if eps is not None:  # for use as encoder
                assert y is None
                self.out = self.mu + self.sigma * eps
            if y is not None:  # for use as decoder
                assert eps is None
                # grey level of each pixel
                self.out = T.nnet.sigmoid(self.mu)
                # Gaussian log-likelihood of data y under (z, params)
                self.cost = -T.sum(log_diag_mvn(self.out, self.var)(y))
        else:
            # Rank-1 covariance factor u.
            self.cov_u_layer = HiddenLayer(input=self.hidden_layers[-1].output,
                                           n_in=self.hidden_layers[-1].n_out,
                                           n_out=n_out,
                                           activation=activation,
                                           prefix='GaussianMLP_cov_u')
            self.u = self.cov_u_layer.output
            self.params = self.params + self.cov_u_layer.params
            if eps is not None:  # encoder, icml14 eq. (21)
                assert y is None
                self.out, _ = scan(SampleKsi,
                                   sequences=[self.var, self.u, self.mu, eps])
            if y is not None:  # for use as decoder
                assert eps is None
                self.out = T.nnet.sigmoid(self.mu)  # grey level of each pixel
                self.cost = -T.sum(
                    log_nondiag_mvn(self.mu, self.var, self.u)(y))
Example #4
0
File: mlp.py — Project: sshidy/SBP-DLGM
    def __init__(self, x, n_in, n_hid, n_out, nlayers=1, activation=None, y=None, eps=None, COV=False):
        """Gaussian MLP with optional rank-1 covariance correction (icml14 SBP).

        x/n_in/n_hid/n_out/nlayers configure the hidden MLP; activation is
        applied to the mu/logvar (and cov_u) heads. y (target) selects decoder
        mode; eps (noise sample) selects encoder mode — exactly one of the two
        may be supplied. COV=True adds a rank-1 covariance factor head u.
        """
        super(GaussianMLP, self).__init__(x, n_in, n_hid, nlayers=nlayers, prefix='GaussianMLP_hidden')

        # mu & logvar are affine maps of the last hidden layer when encoding.
        self.mu_layer = HiddenLayer(
            input=self.hidden_layers[-1].output,
            n_in=self.hidden_layers[-1].n_out,
            n_out=n_out,
            activation=activation,  # None or T.nnet.softplus; little difference if logvar>0 (freyfaces)
            prefix='GaussianMLP_mu'
        )
        # Head emits log(sigma^2), not sigma (logvar = 2*log(sigma)).
        self.logvar_layer = HiddenLayer(
            input=self.hidden_layers[-1].output,
            n_in=self.hidden_layers[-1].n_out,
            n_out=n_out,
            activation=activation,  # None, ReLU or sigmoid; keeps logvar>0 for freyfaces
            prefix='GaussianMLP_logvar'
        )
        self.mu = self.mu_layer.output
        self.var = T.exp(self.logvar_layer.output)
        self.params = self.params + self.mu_layer.params + self.logvar_layer.params

        def SampleKsi(d, u, mu, eps):  # icml14 SBP eq. (20)
            # R = D^{-1/2} - coeff * D^{-1} u u^T D^{-1/2}; sample = mu + R.eps
            dn = 1.0 / d
            uDnu = T.sum(u * u * dn)
            # SMALLNUM guards against division by zero when u -> 0.
            coeff = (1 - 1.0 / T.sqrt(1.0 + uDnu)) / (uDnu + SMALLNUM)
            u = u.reshape((u.shape[0], 1))
            R = T.diag(T.sqrt(dn)) - coeff * T.dot(T.dot(T.diag(dn), T.dot(u, u.T)), T.diag(T.sqrt(dn)))
            return mu + T.dot(R, eps)

        if not COV:
            # Diagonal covariance.
            self.sigma = T.sqrt(self.var)
            # Compare against None explicitly: eps/y are symbolic Theano
            # variables and must not be truth-tested directly.
            if eps is not None:  # for use as encoder
                assert y is None
                self.out = self.mu + self.sigma * eps
            if y is not None:  # for use as decoder
                assert eps is None
                self.out = T.nnet.sigmoid(self.mu)  # grey level of each pixel
                # Gaussian log-likelihood of data y under (z, params)
                self.cost = -T.sum(log_diag_mvn(self.out, self.var)(y))
        else:
            # Rank-1 covariance factor u.
            self.cov_u_layer = HiddenLayer(
                input=self.hidden_layers[-1].output,
                n_in=self.hidden_layers[-1].n_out,
                n_out=n_out,
                activation=activation,
                prefix='GaussianMLP_cov_u'
            )
            self.u = self.cov_u_layer.output
            self.params = self.params + self.cov_u_layer.params
            if eps is not None:  # encoder, icml14 eq. (21)
                assert y is None
                self.out, _ = scan(SampleKsi, sequences=[self.var, self.u, self.mu, eps])
            if y is not None:  # for use as decoder
                assert eps is None
                self.out = T.nnet.sigmoid(self.mu)  # grey level of each pixel
                self.cost = -T.sum(log_nondiag_mvn(self.mu, self.var, self.u)(y))