Example #1
 def get_output_step(self, cell_output):
     with tf.variable_scope('mean', reuse=tf.AUTO_REUSE):
         mean_net = DenseNet(input_=cell_output,
                             hidden_dim=-1, 
                             output_dim=self.output_dim, 
                             num_layers=1, 
                             transfer_fct=None,
                             act_out=self.act_out_mean, 
                             reuse=tf.AUTO_REUSE, 
                             kinit=self.kinit,
                             bias_init=self.bias_init)
         
         mean = mean_net.output
         
     with tf.variable_scope('var', reuse=tf.AUTO_REUSE):
         if(self.var_shared):
             var = utils.get_variable(self.output_dim, 'var')
             var = tf.tile(var, [self.batch_size, 1])# [batch_size, var.dim]
         else:
             var_net = DenseNet(input_=cell_output,
                                 hidden_dim=-1, 
                                 output_dim=self.output_dim, 
                                 num_layers=1, 
                                 transfer_fct=None,
                                 act_out=self.act_out_var, 
                                 reuse=tf.AUTO_REUSE, 
                                 kinit=self.kinit,
                                 bias_init=self.bias_init)
             
             var = var_net.output
     
     eps = tf.random_normal((self.batch_size, self.output_dim), 0, 1, dtype=tf.float32)
     current_z = tf.add(mean, tf.multiply(tf.sqrt(var), eps))
     return mean, var, current_z
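
get_output_step draws its latent sample with the reparameterization trick, z = mean + sqrt(var) * eps with eps ~ N(0, 1), so gradients can flow through mean and var. Below is a minimal self-contained NumPy sketch of just that step; the mean and var arrays are placeholders standing in for the two DenseNet outputs, and the shapes are illustrative only.

# Minimal NumPy sketch of the reparameterization step used above.
# mean and var stand in for mean_net.output and var_net.output; shapes are illustrative.
import numpy as np

batch_size, output_dim = 8, 4
mean = np.zeros((batch_size, output_dim))
var = np.ones((batch_size, output_dim))      # must be positive (e.g. a softplus output)

eps = np.random.normal(0.0, 1.0, size=(batch_size, output_dim))
current_z = mean + np.sqrt(var) * eps        # same as tf.add(mean, tf.multiply(tf.sqrt(var), eps))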
Example #2
 def build(self, input_):
     output = None
     x = input_
     for i in range(self.num_layers+2):
         x = self.conv_layer(input_=x, 
                             filters=32, 
                             k_size=4,  #[4, 4]
                             stride=2, 
                             padding='SAME',
                             name=self.prefix + '_' + 'conv_' + str(
                                 i + 1) if self.prefix != '' else 'conv_' + str(i + 1),
                             act_func=self.transfer_fct)
                             
         if self.batch_norm:
             x = tf.layers.batch_normalization(x,name='batch_norm_'+str(i+1))
     
     x = tf.layers.flatten(x)
     
     dense = DenseNet(input_=x,
                      hidden_dim=self.hidden_dim, 
                      output_dim=self.output_dim, 
                      num_layers=self.num_layers, 
                      transfer_fct=self.transfer_fct,
                      act_out=self.act_out, 
                      reuse=self.reuse, 
                      kinit=self.kinit,
                      bias_init=self.bias_init,
                      drop_rate=self.drop_rate,
                      batch_norm=self.batch_norm,
                      prefix=self.prefix)
     x = dense.output
     return x
 def create_decoder(self, input_, hidden_dim, output_dim, num_layers, transfer_fct, \
                    act_out, reuse, kinit, bias_init, drop_rate, prefix, isConv):
     recons_ = DenseNet(input_=input_,
                        hidden_dim=hidden_dim,
                        output_dim=output_dim,
                        num_layers=num_layers,
                        transfer_fct=transfer_fct,
                        act_out=act_out,
                        reuse=reuse,
                        kinit=kinit,
                        bias_init=bias_init,
                        drop_rate=drop_rate,
                        prefix=prefix) if not isConv else \
         DeconvNet3(input_=input_,
                    num_layers=num_layers,
                    hidden_dim=hidden_dim,
                    output_dim=output_dim,
                    width=self.config.width,
                    height=self.config.height,
                    nchannels=self.config.num_channels,
                    transfer_fct=transfer_fct,
                    act_out=act_out,
                    reuse=reuse,
                    kinit=kinit,
                    bias_init=bias_init,
                    drop_rate=drop_rate,
                    prefix=prefix)
     return recons_
 def create_encoder(self, input_, hidden_dim, output_dim, num_layers, transfer_fct, \
                    act_out, reuse, kinit, bias_init, drop_rate, prefix, isConv):
     latent_ = DenseNet(input_=input_,
                        hidden_dim=hidden_dim,
                        output_dim=output_dim,
                        num_layers=num_layers,
                        transfer_fct=transfer_fct,
                        act_out=act_out,
                        reuse=reuse,
                        kinit=kinit,
                        bias_init=bias_init,
                        drop_rate=drop_rate,
                        prefix=prefix) if not isConv else \
         ConvNet3(input_=input_,
                  hidden_dim=hidden_dim,
                  output_dim=output_dim,
                  num_layers=num_layers,
                  transfer_fct=transfer_fct,
                  act_out=act_out,
                  reuse=reuse,
                  kinit=kinit,
                  bias_init=bias_init,
                  drop_rate=drop_rate,
                  prefix=prefix)
     return latent_
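
Every example on this page builds on the same DenseNet helper, whose implementation is not shown here. The sketch below is a hypothetical reconstruction inferred only from how the class is called above (the constructor arguments, the .output attribute, and the .dense() method used in Example #11); the class name DenseNetSketch and the layer semantics are assumptions, and the real DenseNet may differ. TensorFlow 1.x API is assumed throughout.

# Hypothetical sketch of a DenseNet-like MLP, inferred from its call sites above.
# Not the actual implementation used by these snippets; TensorFlow 1.x assumed.
import tensorflow as tf  # TensorFlow 1.x

class DenseNetSketch(object):
    def __init__(self, input_, hidden_dim, output_dim, num_layers,
                 transfer_fct=tf.nn.relu, act_out=None, reuse=False,
                 kinit=None, bias_init=tf.zeros_initializer(),
                 drop_rate=0.0, batch_norm=False, prefix=''):
        name_prefix = prefix + '_' if prefix else ''
        x = input_
        # Assumed semantics: num_layers - 1 hidden layers of width hidden_dim,
        # then one output layer of width output_dim with act_out.
        for i in range(max(num_layers - 1, 0)):
            x = self.dense(x, hidden_dim, name=name_prefix + 'dense_' + str(i + 1),
                           act_func=transfer_fct, kinit=kinit,
                           bias_init=bias_init, reuse=reuse)
            if batch_norm:
                x = tf.layers.batch_normalization(
                    x, name=name_prefix + 'batch_norm_' + str(i + 1), reuse=reuse)
            if drop_rate:
                x = tf.layers.dropout(x, rate=drop_rate)
        if num_layers > 0:
            x = self.dense(x, output_dim, name=name_prefix + 'dense_out',
                           act_func=act_out, kinit=kinit,
                           bias_init=bias_init, reuse=reuse)
        self.output = x

    def dense(self, input_, output_dim, name, act_func=None,
              kinit=None, bias_init=tf.zeros_initializer(), reuse=False):
        # Single fully connected layer; Example #11 also calls this directly.
        return tf.layers.dense(input_, units=output_dim, activation=act_func,
                               kernel_initializer=kinit, bias_initializer=bias_init,
                               name=name, reuse=reuse)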
Example #5
 def get_next_input(self, x_time, current_z):
     with tf.variable_scope('aux', reuse=tf.AUTO_REUSE):
         aux_net = DenseNet(input_=current_z,
                             hidden_dim=-1, 
                             output_dim=self.output_dim, 
                             num_layers=1, 
                             transfer_fct=None,
                             act_out=tf.nn.sigmoid, 
                             reuse=tf.AUTO_REUSE)
         current_z = aux_net.output
     return tf.concat([tf.layers.dropout(x_time, rate=self.drop_rate_x), current_z],1)
Example #6
 def create_decoder(self,input_,hidden_dim,output_dim,num_layers,transfer_fct, \
                    act_out,reuse,kinit,bias_init, drop_rate):
     recons_ = DenseNet(input_=input_,
                        hidden_dim=hidden_dim,
                        output_dim=output_dim,
                        num_layers=num_layers,
                        transfer_fct=transfer_fct,
                        act_out=act_out,
                        reuse=reuse,
                        kinit=kinit,
                        bias_init=bias_init,
                        drop_rate=drop_rate)
     return recons_
Example #7
    def Px_z_graph(self, z_input, reuse):
        with tf.variable_scope('Pz_x', reuse=reuse):
            Px_z_mean = DenseNet(input_=z_input,
                                 hidden_dim=self.hidden_dim,
                                 output_dim=self.x_flat_dim,
                                 num_layers=2,
                                 transfer_fct=self.transfer_fct,
                                 act_out=tf.nn.sigmoid,
                                 reuse=self.reuse,
                                 kinit=self.kinit,
                                 bias_init=self.bias_init,
                                 drop_rate=self.drop_rate)

            x_recons_mean_flat = Px_z_mean.output

        return x_recons_mean_flat
Example #8
    def build(self, input_):
        aux_size = self.width // 2 // 2
        aux_size_2 = self.height // 2 // 2
        initial_n_channels = 64
        out_dense_dim = aux_size * aux_size_2 * initial_n_channels
        hidden_dim = input_.get_shape()[-1].value * 3

        dense = DenseNet(input_=input_,
                         hidden_dim=hidden_dim,
                         output_dim=out_dense_dim,
                         num_layers=2,
                         transfer_fct=self.transfer_fct,
                         act_out=self.transfer_fct,
                         reuse=self.reuse,
                         kinit=self.kinit,
                         bias_init=self.bias_init,
                         drop_rate=self.drop_rate)
        x = dense.output
        x = tf.reshape(x, [-1, aux_size, aux_size_2, initial_n_channels])
        x = self.deconv_layer(
            input_=x,
            filters=64,
            k_size=4,  # [4, 4]
            stride=2,
            padding='SAME',
            name='deconv_1',
            act_func=self.transfer_fct)

        x = self.deconv_layer(
            input_=x,
            filters=32,
            k_size=4,  # [4, 4]
            stride=2,
            padding='SAME',
            name='deconv_2',
            act_func=self.transfer_fct)
        x = self.deconv_layer(
            input_=x,
            filters=self.nchannels,
            k_size=4,  # [4, 4]
            stride=1,
            padding='SAME',
            name='deconv_3',
            act_func=self.act_out)

        return x
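
For reference, the spatial bookkeeping in this decoder works out as follows: the dense layer produces width//4 * height//4 * 64 units, which are reshaped and then upsampled back to the original resolution by the two stride-2 deconvolutions. A quick numeric check, assuming width = height = 28 purely for illustration:

# Shape check for the decoder above, assuming width = height = 28 (illustrative only).
width = height = 28
aux_size = width // 2 // 2                  # 7
aux_size_2 = height // 2 // 2               # 7
out_dense_dim = aux_size * aux_size_2 * 64  # 7 * 7 * 64 = 3136
# reshape                   -> [batch, 7, 7, 64]
# deconv_1 (stride 2, SAME) -> [batch, 14, 14, 64]
# deconv_2 (stride 2, SAME) -> [batch, 28, 28, 32]
# deconv_3 (stride 1, SAME) -> [batch, 28, 28, nchannels]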
Example #9
 def build(self, input_):
     output = None
     x = self.conv_layer(input_=input_, 
                         filters=32, 
                         k_size=4,  #[4, 4]
                         stride=2, 
                         padding='SAME', 
                         name='conv_1',  
                         act_func=self.transfer_fct)
     
     x = self.conv_layer(input_=x, 
                         filters=64, 
                         k_size=4,  #[4, 4]
                         stride=2, 
                         padding='SAME', 
                         name='conv_2',  
                         act_func=self.transfer_fct)
     x = self.conv_layer(input_=x, 
                         filters=64, 
                         k_size=4,  #[4, 4]
                         stride=2, 
                         padding='SAME', 
                         name='conv_3',  
                         act_func=self.transfer_fct)
     
     x = tf.contrib.layers.flatten(x)
     
     dense = DenseNet(input_=x,
                      hidden_dim=self.hidden_dim, 
                      output_dim=self.output_dim, 
                      num_layers=2, 
                      transfer_fct=self.transfer_fct,
                      act_out=self.act_out, 
                      reuse=self.reuse, 
                      kinit=self.kinit,
                      bias_init=self.bias_init,
                      drop_rate=self.drop_rate)
     x = dense.output
     return x
Example #10
    def build(self, input_):
        output = None
        x = self.conv_layer(input_=input_, 
                            filters=32, 
                            k_size=4,  #[4, 4]
                            stride=2, 
                            padding='SAME', 
                            name=self.prefix + '_'+'conv_1' if self.prefix !='' else 'conv_1',
                            act_func=self.transfer_fct)
                            
        if self.batch_norm:
            x = tf.layers.batch_normalization(x,name='batch_norm_1')                      
        
        x = self.conv_layer(input_=x, 
                            filters=64, 
                            k_size=4,  #[4, 4]
                            stride=2, 
                            padding='SAME',
                            name=self.prefix + '_' + 'conv_2' if self.prefix != '' else 'conv_2',
                            act_func=self.transfer_fct)
        
        if self.batch_norm:
            x = tf.layers.batch_normalization(x,name='batch_norm_2')                      
                            
        x = self.conv_layer(input_=x, 
                            filters=64, 
                            k_size=4,  #[4, 4]
                            stride=2, 
                            padding='SAME',
                            name=self.prefix + '_' + 'conv_3' if self.prefix != '' else 'conv_3',
                            act_func=self.transfer_fct)
        
        x = tf.layers.flatten(x)
        
        
        dense = DenseNet(input_=x,
                         hidden_dim=self.hidden_dim, 
                         output_dim=self.hidden_dim, 
                         num_layers=1, 
                         transfer_fct=self.transfer_fct,
                         act_out=self.transfer_fct, 
                         reuse=self.reuse, 
                         kinit=self.kinit,
                         bias_init=self.bias_init,
                         drop_rate=self.drop_rate,
                         batch_norm=self.batch_norm,
                         prefix=self.prefix)

        x = dense.output
        with tf.variable_scope('mean', reuse=self.reuse):
            dense_mean = DenseNet(input_=x,
                             hidden_dim=self.hidden_dim, 
                             output_dim=self.output_dim, 
                             num_layers=1, 
                             transfer_fct=self.transfer_fct,
                             act_out=self.act_out_mean, 
                             reuse=self.reuse, 
                             kinit=self.kinit,
                             bias_init=self.bias_init,
                             drop_rate=self.drop_rate,
                             batch_norm=self.batch_norm,
                             prefix=self.prefix)

        with tf.variable_scope('var', reuse=self.reuse):
            dense_var = DenseNet(input_=x,
                             hidden_dim=self.hidden_dim, 
                             output_dim=self.output_dim, 
                             num_layers=1, 
                             transfer_fct=self.transfer_fct,
                             act_out=self.act_out_var, 
                             reuse=self.reuse, 
                             kinit=self.kinit,
                             bias_init=self.bias_init,
                             drop_rate=self.drop_rate,
                             batch_norm=self.batch_norm,
                             prefix=self.prefix)
        return dense_mean.output, dense_var.output
Example #11
    def Pz_wy_graph(self, w_input, reuse):
        with tf.variable_scope('Pz_wy', reuse=reuse):

            Pz_wy = DenseNet(input_=w_input,
                             hidden_dim=self.hidden_dim,
                             output_dim=self.hidden_dim,
                             num_layers=1,
                             transfer_fct=self.transfer_fct,
                             act_out=self.transfer_fct,
                             reuse=self.reuse,
                             kinit=self.kinit,
                             bias_init=self.bias_init,
                             drop_rate=self.drop_rate)
            aux = Pz_wy.output

            z_wy_mean_list = list()
            with tf.variable_scope('mean', reuse=reuse):
                for i in range(self.K):
                    Pz_wy_mean = DenseNet(
                        input_=aux,
                        hidden_dim=self.hidden_dim,
                        output_dim=self.z_dim,
                        num_layers=0,
                        transfer_fct=self.transfer_fct,
                        act_out=None,
                        reuse=reuse,
                        kinit=self.kinit,
                        bias_init=tf.truncated_normal_initializer(stddev=1),
                        drop_rate=self.drop_rate)
                    z_mean = Pz_wy_mean.dense(input_=aux,
                                              output_dim=self.z_dim,
                                              name='dense_' + str(i),
                                              act_func=None)
                    # z_mean = tf.scalar_mul(max_value,z_mean)
                    z_wy_mean_list.append(z_mean)

            z_wy_var_list = list()
            with tf.variable_scope('var', reuse=reuse):
                for i in range(self.K):
                    Pz_wy_var = DenseNet(
                        input_=aux,
                        hidden_dim=self.hidden_dim,
                        output_dim=self.z_dim,
                        num_layers=0,
                        transfer_fct=self.transfer_fct,
                        act_out=self.sigma_act,
                        reuse=reuse,
                        kinit=self.kinit,
                        bias_init=tf.truncated_normal_initializer(stddev=1),
                        drop_rate=self.drop_rate)
                    z_var = Pz_wy_var.dense(input_=aux,
                                            output_dim=self.z_dim,
                                            name='dense_' + str(i),
                                            act_func=self.sigma_act)
                    # z_var = tf.scalar_mul(max_value,z_var)
                    z_wy_var_list.append(z_var)

        z_wy_mean_stack = tf.stack(z_wy_mean_list)  # [K, batch_size, z_dim]
        z_wy_var_stack = tf.stack(z_wy_var_list)  # [K, batch_size, z_dim]
        z_wy_logvar_stack = tf.log(z_wy_var_stack)

        return z_wy_mean_list, z_wy_var_list
Example #12
    def create_graph(self):
        print('\n[*] Defining Q(z|x)...')

        with tf.variable_scope('Qz_x', reuse=self.reuse):
            Qz_x = ConvNet3Gauss(input_=self.x_batch,
                                 hidden_dim=self.z_dim * 2,
                                 output_dim=self.z_dim,
                                 reuse=self.reuse,
                                 transfer_fct=self.transfer_fct,
                                 act_out_mean=None,
                                 act_out_var=tf.nn.softplus,
                                 drop_rate=self.drop_rate,
                                 kinit=self.kinit,
                                 bias_init=self.bias_init)

            self.z_x_mean = Qz_x.mean
            self.z_x_var = Qz_x.var

        print('\n[*] Reparameterization trick...')
        self.z_x_logvar = tf.log(self.z_x_var)
        eps = tf.random_normal((self.batch_size, self.z_dim),
                               0,
                               1,
                               dtype=tf.float32)
        self.z_x = tf.add(self.z_x_mean, tf.multiply(tf.sqrt(self.z_x_var),
                                                     eps))

        print('\n[*] Defining Q(w|x)...')
        with tf.variable_scope('Qw_x', reuse=self.reuse):
            Qw_x = ConvNet3Gauss(input_=self.x_batch,
                                 hidden_dim=self.w_dim * 2,
                                 output_dim=self.w_dim,
                                 reuse=self.reuse,
                                 transfer_fct=self.transfer_fct,
                                 act_out_mean=None,
                                 act_out_var=tf.nn.softplus,
                                 drop_rate=self.drop_rate,
                                 kinit=self.kinit,
                                 bias_init=self.bias_init)

            self.w_x_mean = Qw_x.mean
            self.w_x_var = Qw_x.var

        print('\n[*] Reparameterization trick...')
        self.w_x_logvar = tf.log(self.w_x_var)
        eps = tf.random_normal((self.batch_size, self.w_dim),
                               0,
                               1,
                               dtype=tf.float32)
        self.w_x = tf.add(self.w_x_mean, tf.multiply(tf.sqrt(self.w_x_var),
                                                     eps))

        print('\n[*] Defining P(y|w,z)...')
        with tf.variable_scope('Py_wz', reuse=self.reuse):
            zw = tf.concat([self.w_x, self.z_x], 1, name='wz_concat')
            Py_wz = DenseNet(input_=zw,
                             hidden_dim=self.hidden_dim,
                             output_dim=self.K,
                             num_layers=self.num_layers,
                             transfer_fct=self.transfer_fct,
                             act_out=None,
                             reuse=self.reuse,
                             kinit=self.kinit,
                             bias_init=self.bias_init,
                             drop_rate=self.drop_rate)
            self.py_wz_logit = Py_wz.output
            self.py_wz = tf.nn.softmax(self.py_wz_logit)
        # Add small constant to avoid tf.log(0)
        self.log_py_wz = tf.log(1e-10 + self.py_wz)

        print('\n[*] Defining P(z|w,y)...')
        z_wy_mean_list, z_wy_var_list = self.Pz_wy_graph(self.w_x, self.reuse)

        self.z_wy_mean_stack = tf.stack(
            z_wy_mean_list)  # [K, batch_size, z_dim]
        self.z_wy_var_stack = tf.stack(z_wy_var_list)  # [K, batch_size, z_dim]
        self.z_wy_logvar_stack = tf.log(self.z_wy_var_stack)

        print('\n[*] Defining P(x|z)...')
        self.x_recons_mean_flat = self.Px_z_graph(self.z_x, self.reuse)

        eps = tf.random_normal(tf.shape(self.x_recons_mean_flat),
                               0,
                               1,
                               dtype=tf.float32)
        self.x_recons_flat = tf.add(self.x_recons_mean_flat,
                                    tf.multiply(tf.sqrt(self.sigma), eps))
        self.x_recons = tf.reshape(
            self.x_recons_flat, [-1, self.width, self.height, self.nchannel])

        print('\n[*] Defining sampling...')
        self.w_sample = tf.random_normal((self.batch_size, self.w_dim),
                                         0,
                                         1,
                                         dtype=tf.float32)
        self.z_wy_mean_list_sample, self.z_wy_var_list_sample = self.Pz_wy_graph(
            self.w_sample, True)
        self.z_sample_list = list()
        for i in range(self.K):
            eps = tf.random_normal(tf.shape(self.z_wy_mean_list_sample[i]),
                                   0,
                                   1,
                                   dtype=tf.float32)
            z_sample = tf.add(
                self.z_wy_mean_list_sample[i],
                tf.multiply(tf.sqrt(self.z_wy_var_list_sample[i]), eps))
            self.z_sample_list.append(z_sample)

        self.x_sample_mean_flat_list = list()
        self.x_sample_flat_list = list()
        self.x_sample_list = list()
        for i in range(self.K):
            x_sample_mean_flat = self.Px_z_graph(self.z_sample_list[i], True)
            self.x_sample_mean_flat_list.append(x_sample_mean_flat)

            eps = tf.random_normal(tf.shape(x_sample_mean_flat),
                                   0,
                                   1,
                                   dtype=tf.float32)
            x_sample_flat = tf.add(x_sample_mean_flat,
                                   tf.multiply(tf.sqrt(self.sigma), eps))
            x_sample = tf.reshape(x_sample_flat,
                                  [-1, self.width, self.height, self.nchannel])

            self.x_sample_flat_list.append(x_sample_flat)
            self.x_sample_list.append(x_sample)
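
Example #12 stacks the per-component means and variances into [K, batch_size, z_dim] tensors and pairs them with the K-way responsibilities p(y|w,z). The self-contained NumPy sketch below shows one way such stacked statistics can be combined into a mixture sample; all shapes, the uniform responsibilities, and the values of K, batch_size and z_dim are illustrative and not taken from the model.

# Self-contained NumPy sketch: sample z from a mixture whose per-component
# statistics are stacked as [K, batch_size, z_dim], mirroring Example #12.
import numpy as np

K, batch_size, z_dim = 3, 4, 2
z_wy_mean_stack = np.random.randn(K, batch_size, z_dim)   # per-component means
z_wy_var_stack = np.ones((K, batch_size, z_dim))           # per-component variances
py_wz = np.full((batch_size, K), 1.0 / K)                   # responsibilities p(y|w,z)

# Pick a component per batch element according to p(y|w,z), then reparameterize.
y = np.array([np.random.choice(K, p=py_wz[b]) for b in range(batch_size)])
mean = z_wy_mean_stack[y, np.arange(batch_size)]            # [batch_size, z_dim]
var = z_wy_var_stack[y, np.arange(batch_size)]              # [batch_size, z_dim]
eps = np.random.normal(size=(batch_size, z_dim))
z = mean + np.sqrt(var) * eps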
Example #13
 def build(self, input_):
     output = None
     x = self.conv_layer(input_=input_, 
                         filters=32, 
                         k_size=4,  #[4, 4]
                         stride=2, 
                         padding='SAME', 
                         name='conv_1',  
                         act_func=self.transfer_fct)
     
     x = self.conv_layer(input_=x, 
                         filters=64, 
                         k_size=4,  #[4, 4]
                         stride=2, 
                         padding='SAME', 
                         name='conv_2',  
                         act_func=self.transfer_fct)
     x = self.conv_layer(input_=x, 
                         filters=64, 
                         k_size=4,  #[4, 4]
                         stride=2, 
                         padding='SAME', 
                         name='conv_3',  
                         act_func=self.transfer_fct)
     
     x = tf.contrib.layers.flatten(x)
     
     
     dense = DenseNet(input_=x,
                      hidden_dim=self.hidden_dim, 
                      output_dim=self.hidden_dim, 
                      num_layers=1, 
                      transfer_fct=self.transfer_fct,
                      act_out=self.transfer_fct, 
                      reuse=self.reuse, 
                      kinit=self.kinit,
                      bias_init=self.bias_init,
                      drop_rate=self.drop_rate)
     
     
     x = dense.output
     with tf.variable_scope('mean', reuse=self.reuse):
         dense_mean = DenseNet(input_=x,
                          hidden_dim=self.hidden_dim, 
                          output_dim=self.output_dim, 
                          num_layers=1, 
                          transfer_fct=self.transfer_fct,
                          act_out=self.act_out_mean, 
                          reuse=self.reuse, 
                          kinit=self.kinit,
                          bias_init=self.bias_init,
                          drop_rate=self.drop_rate)
     with tf.variable_scope('var', reuse=self.reuse):
         dense_var = DenseNet(input_=x,
                          hidden_dim=self.hidden_dim, 
                          output_dim=self.output_dim, 
                          num_layers=1, 
                          transfer_fct=self.transfer_fct,
                          act_out=self.act_out_var, 
                          reuse=self.reuse, 
                          kinit=self.kinit,
                          bias_init=self.bias_init,
                          drop_rate=self.drop_rate)
     return dense_mean.output, dense_var.output
Example #14
    def build(self, input_):
        aux_size = self.width // 2 // 2
        aux_size_2 = self.height // 2 // 2
        initial_n_channels = 64
        out_dense_dim = aux_size * aux_size_2 * initial_n_channels
        hidden_dim = input_.get_shape()[-1].value * 3

        dense = DenseNet(input_=input_,
                         hidden_dim=hidden_dim,
                         output_dim=out_dense_dim,
                         num_layers=self.num_layers,
                         transfer_fct=self.transfer_fct,
                         act_out=self.transfer_fct,
                         reuse=self.reuse,
                         kinit=self.kinit,
                         bias_init=self.bias_init,
                         drop_rate=self.drop_rate,
                         batch_norm=self.batch_norm)
        x = dense.output
        x = tf.reshape(x, [-1, aux_size, aux_size_2, initial_n_channels])

        x = self.deconv_layer(
            input_=x,
            filters=64,
            k_size=4,  #[4, 4]
            stride=2,
            padding='SAME',
            name='deconv_1',
            act_func=self.transfer_fct)

        if self.batch_norm:
            x = tf.layers.batch_normalization(x, name='batch_norm_1')

        x = self.deconv_layer(
            input_=x,
            filters=32,
            k_size=4,  #[4, 4]
            stride=2,
            padding='SAME',
            name='deconv_2',
            act_func=self.transfer_fct)

        if self.batch_norm:
            x = tf.layers.batch_normalization(x, name='batch_norm_2')

        for i in range(self.num_layers):
            x = self.deconv_layer(
                input_=x,
                filters=self.nchannels,
                k_size=4,  #[4, 4]
                stride=1,
                padding='SAME',
                name='deconv_' + str(i + 3),
                act_func=self.transfer_fct)
            if self.batch_norm:
                x = tf.layers.batch_normalization(x,
                                                  name='batch_norm_' +
                                                  str(i + 3))

        x = self.deconv_layer(
            input_=x,
            filters=self.nchannels,
            k_size=4,  #[4, 4]
            stride=1,
            padding='SAME',
            name='deconv_' + str(self.num_layers + 3),
            act_func=self.act_out)

        x = tf.contrib.layers.flatten(x)
        return x
Example #15
    def create_graph(self):
        print('\n[*] Defining encoder...')

        with tf.variable_scope('encoder_mean', reuse=self.reuse):
            Qz_x_mean = DenseNet(input_=self.x_batch_flat,
                            hidden_dim=self.hidden_dim, 
                            output_dim=self.z_dim, 
                            num_layers=self.num_layers, 
                            transfer_fct=self.transfer_fct,
                            act_out=None, 
                            reuse=self.reuse, 
                            kinit=self.kinit,
                            bias_init=self.bias_init,
                            drop_rate=self.drop_rate)
        
            self.encoder_mean = Qz_x_mean.output
            
        with tf.variable_scope('encoder_var', reuse=self.reuse):
            Qz_x_var = DenseNet(input_=self.x_batch_flat,
                            hidden_dim=self.hidden_dim, 
                            output_dim=self.z_dim, 
                            num_layers=self.num_layers, 
                            transfer_fct=self.transfer_fct,
                            act_out=tf.nn.softplus, 
                            reuse=self.reuse, 
                            kinit=self.kinit,
                            bias_init=self.bias_init,
                            drop_rate=self.drop_rate)
        
            self.encoder_var = Qz_x_var.output
        
        print('\n[*] Reparameterization trick...')
        self.encoder_logvar = tf.log(self.encoder_var)
        eps = tf.random_normal((self.batch_size, self.z_dim), 0, 1, dtype=tf.float32)
        self.z = tf.add(self.encoder_mean, tf.multiply(tf.sqrt(self.encoder_var), eps))
       
        print('\n[*] Defining decoder...')
        with tf.variable_scope('decoder_mean', reuse=self.reuse):
            Px_z_mean = DenseNet(input_=self.z,
                            hidden_dim=self.hidden_dim, 
                            output_dim=self.x_flat_dim, 
                            num_layers=2, 
                            transfer_fct=self.transfer_fct,
                            act_out=tf.nn.sigmoid, 
                            reuse=self.reuse, 
                            kinit=self.kinit,
                            bias_init=self.bias_init,
                            drop_rate=self.drop_rate)
        
            self.decoder_mean_flat = Px_z_mean.output
        
        eps = tf.random_normal(tf.shape(self.decoder_mean_flat), 0, 1, dtype=tf.float32)
        self.decoder_x_flat = tf.add(self.decoder_mean_flat, tf.multiply(tf.sqrt(self.sigma), eps))
        self.decoder_x = tf.reshape(self.decoder_x_flat , [-1,self.width, self.height, self.nchannel])

        
        print('\n[*] Defining sampling...')
        self.z_sample = tf.random_normal((self.batch_size, self.z_dim), 0, 1, dtype=tf.float32)
        
        with tf.variable_scope('decoder_mean', reuse=True):
            Px_z_mean = DenseNet(input_=self.z_sample,
                            hidden_dim=self.hidden_dim, 
                            output_dim=self.x_flat_dim, 
                            num_layers=2, 
                            transfer_fct=self.transfer_fct,
                            act_out=tf.nn.sigmoid, 
                            reuse=True, 
                            kinit=self.kinit,
                            bias_init=self.bias_init,
                            drop_rate=self.drop_rate)
        
            self.samples_mean_flat = Px_z_mean.output
        
        eps = tf.random_normal(tf.shape(self.samples_mean_flat), 0, 1, dtype=tf.float32)
        self.samples_flat = tf.add(self.samples_mean_flat, tf.multiply(tf.sqrt(self.sigma), eps))
        self.samples = tf.reshape(self.samples_flat , [-1,self.width, self.height, self.nchannel])
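
In this last example the sampling branch rebuilds the decoder under the same 'decoder_mean' scope with reuse=True, so prior samples pass through the very same weights as the reconstructions. The minimal TF 1.x sketch below illustrates that sharing pattern; the toy_decoder function is a stand-in for illustration, not the DenseNet decoder used above.

# Minimal TF 1.x sketch of the variable-sharing pattern used above: building the
# decoder twice under the same scope, the second time with reuse=True.
import tensorflow as tf  # TensorFlow 1.x

def toy_decoder(z, reuse):
    with tf.variable_scope('decoder_mean', reuse=reuse):
        return tf.layers.dense(z, units=10, activation=tf.nn.sigmoid, name='dense_out')

z_posterior = tf.placeholder(tf.float32, [None, 4])
z_prior = tf.random_normal([8, 4])

x_recons = toy_decoder(z_posterior, reuse=False)  # creates decoder_mean/dense_out/*
x_sample = toy_decoder(z_prior, reuse=True)       # reuses the same variables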