Example No. 1
    def FCDiscriminator_sampler(self, inputs, FC_DIM=512, n_layers=3):
        # Rebuild the discriminator graph with reused variables so the
        # first-layer filters and per-layer activations can be inspected.
        with tf.variable_scope('Discriminator') as scope:
            scope.reuse_variables()
            # (batch, num_neurons * num_bins) -> (batch, num_neurons, num_bins)
            output = tf.reshape(inputs, [-1, self.num_neurons, self.num_bins])
            conv1d_II.set_weights_stdev(0.02)
            # 1D convolution across bins, treating neurons as channels;
            # save_filter=True also returns the filter tensor.
            output, filters = conv1d_II.Conv1D('Input',
                                               self.num_neurons,
                                               self.num_features,
                                               self.kernel_width,
                                               output,
                                               stride=1,
                                               save_filter=True)
            output = tf.reshape(output,
                                [-1, self.num_features * self.num_bins])
            outputs_mat = [output]
            # Fully connected LeakyReLU stack; keep each layer's activations.
            output = act_funct.LeakyReLULayer(
                '0', self.num_features * self.num_bins, FC_DIM, output)
            outputs_mat.append(output)
            for i in range(n_layers - 1):
                output = act_funct.LeakyReLULayer('{}'.format(i + 1), FC_DIM,
                                                  FC_DIM, output)
                outputs_mat.append(output)
            # Single scalar critic score per sample.
            output = linear.Linear('Out', FC_DIM, 1, output)
            conv1d_II.unset_weights_stdev()

            return tf.reshape(output, [-1]), [filters], outputs_mat
Example No. 2
    def FCDiscriminator(self, inputs, n_layers=3):
        # Input projection followed by a stack of LeakyReLU layers.
        output = act_funct.LeakyReLULayer('Discriminator.Input',
                                          self.output_dim, self.num_units,
                                          inputs)
        for i in range(n_layers):
            output = act_funct.LeakyReLULayer('Discriminator.{}'.format(i),
                                              self.num_units, self.num_units,
                                              output)
        # Single scalar critic score per sample.
        output = linear.Linear('Discriminator.Out', self.num_units, 1, output)

        return tf.reshape(output, [-1])
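For context, a minimal sketch of how a discriminator like this might be wired into a critic loss. Everything here is an assumption, not part of the original code: TensorFlow 1.x graph mode, a model instance `gan` exposing the FCGenerator / FCDiscriminator methods shown on this page, layer helpers that share parameters by name (so calling the method twice reuses the same weights), and a WGAN-style objective.

import tensorflow as tf

# Hypothetical wiring; `gan` is an assumed model instance.
real_data = tf.placeholder(tf.float32, shape=[None, gan.output_dim])
fake_data = gan.FCGenerator(n_samples=64)

disc_real = gan.FCDiscriminator(real_data)  # critic score for real samples
disc_fake = gan.FCDiscriminator(fake_data)  # critic score for generated samples
# (Weight sharing between the two calls is assumed to come from the
#  name-based layer helpers; this is not shown in the original snippets.)

# WGAN-style objectives, illustrative only.
disc_cost = tf.reduce_mean(disc_fake) - tf.reduce_mean(disc_real)
gen_cost = -tf.reduce_mean(disc_fake)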
Example No. 3
    def FCDiscriminator_sampler(self, inputs, n_layers=3):
        # Same architecture as FCDiscriminator, but also collects each
        # layer's activations so they can be sampled after training.
        output = act_funct.LeakyReLULayer('Discriminator.Input',
                                          self.output_dim, self.num_units,
                                          inputs)
        outputs_mat = [output]
        for i in range(n_layers):
            output = act_funct.LeakyReLULayer('Discriminator.{}'.format(i),
                                              self.num_units, self.num_units,
                                              output)
            outputs_mat.append(output)
        output = linear.Linear('Discriminator.Out', self.num_units, 1, output)
        # No convolutional filters in this fully connected variant.
        filters = []
        return tf.reshape(output, [-1]), filters, outputs_mat
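A minimal sketch of how the sampler variant might be used to inspect per-layer activations. The names `gan`, `inputs_ph`, and `x_batch` are assumptions for illustration (a model instance, an input placeholder, and a NumPy batch of the matching shape); the session handling is illustrative only.

import tensorflow as tf

inputs_ph = tf.placeholder(tf.float32, shape=[None, gan.output_dim])
score_op, _, activation_ops = gan.FCDiscriminator_sampler(inputs_ph)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # or restore trained weights
    scores, activations = sess.run([score_op, activation_ops],
                                   feed_dict={inputs_ph: x_batch})
    # `activations` is a list with one array per fully connected layer.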
Example No. 4
    def FCDiscriminator(self, inputs, print_arch=True):
        # Optionally print layer shapes while the graph is being built.
        output = act_funct.LeakyReLULayer('Discriminator.Input',
                                          self.output_dim, self.num_units,
                                          inputs)
        if print_arch:
            print('DISCRIMINATOR. -------------------------------')
            print(str(output.get_shape()) + ' input')
        for i in range(self.num_layers):
            output = act_funct.LeakyReLULayer('Discriminator.{}'.format(i),
                                              self.num_units, self.num_units,
                                              output)
            if print_arch:
                print(str(output.get_shape()) + ' layer ' + str(i))
        # Single scalar critic score per sample.
        output = linear.Linear('Discriminator.Out', self.num_units, 1, output)
        if print_arch:
            print(str(output.get_shape()) + ' output')
        return tf.reshape(output, [-1])
Example No. 5
    def FCDiscriminator(self, inputs, FC_DIM=512, n_layers=3):
        # (batch, num_neurons * num_bins) -> (batch, num_neurons, num_bins)
        output = tf.reshape(inputs, [-1, self.num_neurons, self.num_bins])
        conv1d_II.set_weights_stdev(0.02)
        # 1D convolution across bins, treating neurons as channels.
        output = conv1d_II.Conv1D('Discriminator.Input',
                                  self.num_neurons,
                                  self.num_features,
                                  self.kernel_width,
                                  output,
                                  stride=1)
        output = tf.reshape(output, [-1, self.num_features * self.num_bins])
        output = act_funct.LeakyReLULayer('Discriminator.0',
                                          self.num_features * self.num_bins,
                                          FC_DIM, output)
        for i in range(n_layers - 1):
            output = act_funct.LeakyReLULayer('Discriminator.{}'.format(i + 1),
                                              FC_DIM, FC_DIM, output)
        # Single scalar critic score per sample.
        output = linear.Linear('Discriminator.Out', FC_DIM, 1, output)
        conv1d_II.unset_weights_stdev()
        return tf.reshape(output, [-1])
Example No. 6
    def FCGenerator(self, n_samples, noise=None, print_arch=True):
        # Draw a latent code if none is provided.
        if noise is None:
            noise = tf.random_normal([n_samples, 128])
        if print_arch:
            print('GENERATOR. -------------------------------')
            print(str(noise.get_shape()) + ' latent variable')
        output = act_funct.ReLULayer('Generator.Input', 128, self.num_units,
                                     noise)
        if print_arch:
            print(str(output.get_shape()) + ' linear projection')
        for i in range(self.num_layers):
            output = act_funct.LeakyReLULayer('Generator.{}'.format(i),
                                              self.num_units, self.num_units,
                                              output)
            if print_arch:
                print(str(output.get_shape()) + ' layer ' + str(i))
        output = linear.Linear('Generator.Out', self.num_units,
                               self.output_dim, output)
        if print_arch:
            print(str(output.get_shape()) + ' output')

        # Squash generated samples into [0, 1].
        output = tf.nn.sigmoid(output)

        return output
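A minimal sampling sketch for the generator above. Assumptions, not part of the original code: TensorFlow 1.x graph mode and a model instance `gan` exposing FCGenerator; the session handling is illustrative only.

import tensorflow as tf

samples_op = gan.FCGenerator(n_samples=32, print_arch=False)  # sigmoid outputs in [0, 1]

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # or restore trained weights
    samples = sess.run(samples_op)  # shape: (32, output_dim)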