Example #1
0
    def create_cond_critic_proj(cls, xinput, input_clusters, var_scope,
                           critic_layers, clusters_no, reuse=None):
        """
        Instantiate a Critic conditioned with the original projection
        conditioning method.

        The input cells go through a stack of ReLU layers; the final
        hidden representation is combined with a per-cluster projection
        term and a linear output head, and the sum of the two is the
        critic value.

        Parameters
        ----------
        xinput : Tensor
            Tensor containing the input cells.
        input_clusters : Tensor
            Tensor containing the corresponding cluster indexes of the
            input cells.
        var_scope : str
            Variable scope used for the created tensors.
        critic_layers : list
            List of integers corresponding to the number of neurons of
            each layer of the critic.
        clusters_no : int
            Number of clusters.
        reuse : Boolean
            Whether to reuse the already existing Tensors.
            Default is None.

        Returns
        -------
        An instance of this class wrapping the defined architecture.
        """

        with tf.variable_scope(var_scope, reuse=reuse):
            hidden = xinput
            # Hidden stack: one ReLU fully-connected layer per entry,
            # scopes named "layers_1", "layers_2", ...
            for depth, width in enumerate(critic_layers, start=1):
                with tf.variable_scope("layers_" + str(depth)):
                    hidden = layers.relu(
                        hidden,
                        width,
                        weights_initializer=layers.variance_scaling_initializer(
                            mode="FAN_AVG"),
                        biases_initializer=tf.zeros_initializer())

            with tf.variable_scope("layers_proj"):
                # One learned projection vector per cluster; gather the
                # vector matching each sample's cluster index.
                proj_weights_m = tf.get_variable(
                    "proj_weights_m",
                    [clusters_no, critic_layers[-1], 1],
                    dtype=tf.float32,
                    initializer=layers.xavier_initializer())

                proj_weights = tf.nn.embedding_lookup(proj_weights_m,
                                                      input_clusters)

                # Batched dot product of each hidden vector with its
                # cluster's projection vector.
                output_proj = tf.einsum('ij,ijk->ik', hidden, proj_weights)

            with tf.variable_scope("layers_output"):
                output = layers.linear(
                    hidden, 1,
                    weights_initializer=layers.xavier_initializer(),
                    biases_initializer=tf.zeros_initializer())

                dist = tf.add(output_proj, output)

        # NOTE(review): the first argument handed to the constructor is the
        # last hidden layer (the reassigned input), not the raw input
        # tensor — behavior preserved from the original; confirm intended.
        return cls(hidden, dist, var_scope, critic_layers,
                   input_clusters=input_clusters,
                   clusters_no=clusters_no, reuse=reuse)
Example #2
0
    def create_critic(cls, xinput, var_scope, critic_layers, reuse=None):
        """
        Instantiate a non-conditional Critic.

        The input cells are passed through a stack of ReLU layers
        followed by a single linear output unit.

        Parameters
        ----------
        xinput : Tensor
            Tensor containing the input cells.
        var_scope : str
            Variable scope used for the created tensors.
        critic_layers : list
            List of integers corresponding to the number of neurons of
            each layer of the critic.
        reuse : Boolean
            Whether to reuse the already existing Tensors.
            Default is None.

        Returns
        -------
        An instance of this class wrapping the defined architecture.
        """

        with tf.variable_scope(var_scope, reuse=reuse):
            hidden = xinput
            # Hidden stack, scopes named "layers_1", "layers_2", ...
            for depth, width in enumerate(critic_layers, start=1):
                with tf.variable_scope("layers_" + str(depth)):
                    hidden = layers.relu(
                        hidden,
                        width,
                        weights_initializer=layers.variance_scaling_initializer(
                            mode="FAN_AVG"),
                        biases_initializer=tf.zeros_initializer())

            # Scalar critic value.
            with tf.variable_scope("layers_output"):
                output = layers.linear(
                    hidden,
                    1,
                    weights_initializer=layers.xavier_initializer(),
                    biases_initializer=tf.zeros_initializer())

        return cls(hidden, output, var_scope, critic_layers, reuse=reuse)
Example #3
0
    def create_cond_generator(cls, z_input, batch_size, latent_dim,
                              output_cells_dim, var_scope, gen_layers,
                              output_lsn, gen_cond_type, clusters_ratios,
                              is_training, clusters_no=None,
                              input_clusters=None, reuse=None):
        """
        Class method that instantiates a Generator and creates a
        conditional generator.

        The noise input is passed through a stack of linear layers, each
        optionally followed by conditional batch/layer normalization and
        a ReLU, then a ReLU output layer optionally rescaled by an LSN
        (library-size normalization) layer.

        Parameters
        ----------
        z_input : Tensor
            Tensor containing the noise used as input by the generator.
        batch_size : int
            Batch size used during the training.
        latent_dim : int
            Dimension of the latent space used from which the input noise
            of the generator is sampled.
        output_cells_dim : int
            Dimension of the output cells (i.e. the number of genes).
        var_scope : str
            Variable scope used for the created tensors.
        gen_layers : list
            List of integers corresponding to the number of neurons of
            each layer of the generator.
        output_lsn : int, None
            Parameter of the LSN layer at the output of the critic
            (i.e. total number of counts per generated cell).
        gen_cond_type : str
            conditional normalization layers used in the generator, can be
             either "batchnorm" or "layernorm". If anything else, it won't be
              added in the model (which means no conditional generation).
        clusters_ratios : Tensor
            Placeholder containing the list of cluster ratios of the input data.
        is_training : Tensor
            Boolean placeholder encoding for whether we're in training or
            inference mode (for the batch normalization).
        clusters_no : int
            Number of clusters.
            Default is None.
        input_clusters : Tensor
            Placeholders for the cluster indexes that should be used for
            conditional generation.
            Default is None.
        reuse : Boolean
            Whether to reuse the already existing Tensors.
            Default is None.

        Returns
        -------
        A conditional Generator object with the defined architecture.
        """

        with tf.variable_scope(var_scope, reuse=reuse):

            for i_lay, size in enumerate(gen_layers):
                with tf.variable_scope("generator_layers_" + str(i_lay + 1)):
                    # Linear layer without bias: the bias role is played by
                    # the conditional normalization layer below.
                    z_input = layers.linear(
                        z_input,
                        size,
                        weights_initializer=layers.xavier_initializer(),
                        biases_initializer=None)

                    # NOTE(review): `i_lay != -1` is always true (enumerate
                    # never yields -1), so conditional normalization is
                    # applied to EVERY layer. This may have been intended as
                    # `i_lay != len(gen_layers) - 1` (skip the last layer) —
                    # confirm before changing; behavior kept as-is.
                    if i_lay != -1:
                        if gen_cond_type == "batchnorm":
                            z_input = batchnorm(
                                [0], z_input,
                                is_training=is_training,
                                labels=input_clusters,
                                n_labels=clusters_no)

                        elif gen_cond_type == "layernorm":
                            z_input = layernorm([1],
                                                z_input,
                                                labels=input_clusters,
                                                n_labels=clusters_no)

                    z_input = tf.nn.relu(z_input)

            with tf.variable_scope("generator_layers_" + 'output'):
                # ReLU output keeps the generated expression values
                # non-negative.
                fake_outputs = layers.relu(
                    z_input, output_cells_dim,
                    weights_initializer=layers.variance_scaling_initializer(mode="FAN_AVG"),
                    biases_initializer=tf.zeros_initializer())

                if output_lsn is not None:
                    # LSN layer: rescale each generated cell so its total
                    # count equals output_lsn. The per-sample target is a
                    # non-trainable variable of size batch (first static
                    # dim of z_input — assumed known at graph-build time;
                    # TODO confirm).
                    gammas_output = tf.Variable(
                        np.ones(z_input.shape.as_list()[0]) * output_lsn,
                        trainable=False)
                    sigmas = tf.reduce_sum(fake_outputs, axis=1)
                    # Epsilon guards against division by zero for all-zero
                    # generated cells.
                    scale_ls = tf.cast(gammas_output, dtype=tf.float32) / \
                        (sigmas + sys.float_info.epsilon)

                    # Row-wise rescaling via double transpose (broadcast
                    # over the gene axis).
                    fake_outputs = tf.transpose(tf.transpose(fake_outputs) *
                                                scale_ls)

            return cls(fake_outputs, batch_size, latent_dim, output_cells_dim,
                       var_scope, gen_layers, output_lsn,
                       gen_cond_type=gen_cond_type, is_training=is_training,
                       clusters_ratios=clusters_ratios, clusters_no=clusters_no,
                       input_clusters=input_clusters, reuse=reuse)