Example #1
    def __init__(self, latent_size_prev, latent_size):
        super(StyleBlock, self).__init__()
        self.lrelu = lambda x: tf.keras.activations.relu(x, alpha=0.2)
        self.std_dense = layers.ScaledDense(
            latent_size_prev, bias_initializer=tf.ones_initializer())
        self.latent_size = latent_size
        self.conv_outsize = latent_size

        if USE_POINTWISE_DENSES:
            self.D1 = layers.ScaledDense(latent_size * 4,
                                         activation=self.lrelu)
            self.D2 = layers.ScaledDense(latent_size, activation=self.lrelu)
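
layers.ScaledDense is a project-specific layer whose definition is not part of these snippets. A plausible reading, assuming it follows the usual StyleGAN equalized-learning-rate convention (weights stored at unit variance, He constant applied at run time), is the sketch below; the gain argument, the unit-variance initializer, and the internal names are assumptions, and only the constructor arguments mirror the calls above.

import numpy as np
import tensorflow as tf

class ScaledDense(tf.keras.layers.Layer):
    """Dense layer with equalized learning rate (runtime He scaling) -- a sketch."""

    def __init__(self, units, activation=None, use_bias=True,
                 bias_initializer=tf.zeros_initializer(), gain=np.sqrt(2.0)):
        super().__init__()
        self.units = units
        self.activation = activation
        self.use_bias = use_bias
        self.bias_initializer = bias_initializer
        self.gain = gain

    def build(self, input_shape):
        fan_in = int(input_shape[-1])
        # Weights are stored at unit variance; the He constant is applied at call time.
        self.scale = self.gain / np.sqrt(fan_in)
        self.w = self.add_weight(name="w", shape=(fan_in, self.units),
                                 initializer=tf.random_normal_initializer(stddev=1.0),
                                 trainable=True)
        if self.use_bias:
            self.b = self.add_weight(name="b", shape=(self.units,),
                                     initializer=self.bias_initializer,
                                     trainable=True)

    def call(self, x):
        # Acts on the last axis only, so on NHWC feature maps this behaves
        # like a 1x1 convolution (the 2-D kernel broadcasts over leading axes).
        y = tf.matmul(x, self.w * self.scale)
        if self.use_bias:
            y = y + self.b
        return self.activation(y) if self.activation is not None else y
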
Example #2
 def __init__(self, latent_size, n_denses):
     super(LatentMapping, self).__init__()
     self.lrelu = lambda x: tf.keras.activations.relu(x, alpha=0.2)
     self.denses = [
         layers.ScaledDense(latent_size, activation=self.lrelu)
         for _ in range(n_denses)
     ]
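
The constructor above only builds the stack of mapping denses. A minimal call() sketch of how such a mapping network is typically applied, assuming no extra normalisation of z happens inside this class (StyleGAN normally pixel-normalises z before the mapping):

def call(self, z):
    # The mapping network is just the chained denses: z -> w.
    w = z
    for dense in self.denses:
        w = dense(w)
    return w
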
Example #3
    def __init__(self):
        super(GAN_d, self).__init__()
        self.lrelu = lambda x: tf.keras.activations.relu(x, alpha=0.2)
        self.cstart = layers.ScaledDense(
            LATENT_SIZES[-1], activation=self.lrelu)  #equivalent to 1x1 conv

        self.blocks = [
            D_block(LATENT_SIZES[-1], True, minibatch_stddev=True),
            D_block(LATENT_SIZES[-2], True, minibatch_stddev=True),
            D_block(LATENT_SIZES[-3], True, minibatch_stddev=True),
            D_block(LATENT_SIZES[-4], True, minibatch_stddev=True),
            D_block(LATENT_SIZES[-5], True, minibatch_stddev=True),
            D_block(LATENT_SIZES[-6], False, minibatch_stddev=True)
        ]
        self.cend = layers.ScaledDense(
            LATENT_SIZES[-6], activation=self.lrelu
        )  #the last layer is basically a dense over the whole thing.
        self.cend2 = layers.ScaledDense(1, use_bias=False)
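
The "#equivalent to 1x1 conv" comments rest on the fact that a Keras Dense layer acts only on the last axis, so on an NHWC feature map it applies the same linear map at every spatial position, which is exactly what a 1x1 convolution does. A quick check with stock Keras layers (plain Dense/Conv2D, not the project's scaled variants):

import numpy as np
import tensorflow as tf

x = tf.random.normal([2, 8, 8, 16])               # NHWC feature map

dense = tf.keras.layers.Dense(32, use_bias=False)
conv = tf.keras.layers.Conv2D(32, kernel_size=1, use_bias=False)

y_dense = dense(x)                                # Dense acts on the channel axis only
conv.build(x.shape)
conv.set_weights([dense.get_weights()[0].reshape(1, 1, 16, 32)])
y_conv = conv(x)

print(np.allclose(y_dense.numpy(), y_conv.numpy(), atol=1e-5))   # True
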
Example #4
    def __init__(self, latent_size, downsample, minibatch_stddev=False):
        super(D_block, self).__init__()
        self.lrelu = lambda x: tf.keras.activations.relu(x, alpha=0.2)

        self.conv1 = layers.ScaledConv2D(latent_size,
                                         activation=self.lrelu,
                                         filter_size=3)
        self.conv2 = layers.ScaledConv2D(latent_size,
                                         activation=self.lrelu,
                                         filter_size=3)

        self.conv_residual = layers.ScaledDense(
            latent_size, activation=self.lrelu)  #equivalent to 1x1 conv
        if downsample:
            self.resizer = tf.keras.layers.AveragePooling2D()
        else:
            self.resizer = None
        self.minibatch_stddev = minibatch_stddev
        self.normalisation_coef = tf.math.rsqrt(2.0)
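
No call() is shown for D_block; given the stored members it presumably implements a residual discriminator block roughly like the sketch below. The branch ordering is an assumption and the minibatch-stddev feature (flagged by self.minibatch_stddev) is omitted; the 1/sqrt(2) factor compensates for the variance doubling when the two branches are summed.

def call(self, x):
    # Skip branch: channel projection (the "1x1 conv" dense), then optional downsample.
    skip = self.conv_residual(x)
    if self.resizer is not None:
        skip = self.resizer(skip)

    # Main branch: two 3x3 convs, then the same optional downsample.
    y = self.conv1(x)
    y = self.conv2(y)
    if self.resizer is not None:
        y = self.resizer(y)

    # Adding two roughly unit-variance branches doubles the variance,
    # hence the 1/sqrt(2) coefficient stored in __init__.
    return (y + skip) * self.normalisation_coef
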
Example #5
 def __init__(self, image_channels, latent_size):
     super(PicOutBlock, self).__init__()
     self.std_dense = layers.ScaledDense(
         latent_size, bias_initializer=tf.ones_initializer())
     self.pic_out = layers.ScaledDense(image_channels, use_bias=False)