def __init__(self, w_dim, labels_dim, n_mapping, **kwargs):
    """Mapping network: latent z (plus optional label embedding) -> w.

    Builds ``n_mapping`` stages of Dense -> Bias -> LeakyReLU, preceded by a
    pixel-norm style normalization of the input latent.

    Args:
        w_dim: dimensionality of the intermediate latent w.
        labels_dim: number of label classes; 0 disables label conditioning.
        n_mapping: number of dense mapping stages.
        **kwargs: forwarded to the base Keras layer (e.g. ``name``).
    """
    super().__init__(**kwargs)
    self.w_dim = w_dim
    self.labels_dim = labels_dim
    self.n_mapping = n_mapping
    self.gain = 1.0
    self.lrmul = 0.01  # reduced learning-rate multiplier for the mapping net

    if self.labels_dim > 0:
        self.labels_embedding = LabelEmbedding(embed_dim=self.w_dim, name='labels_embedding')

    # Normalize the latent to unit RMS along the feature axis (pixel-norm).
    self.normalize = tf.keras.layers.Lambda(lambda x: x * tf.math.rsqrt(
        tf.reduce_mean(tf.square(x), axis=1, keepdims=True) + 1e-8))

    # Comprehensions instead of list()+append loops (same layers, same names).
    self.dense_layers = [
        Dense(w_dim, gain=self.gain, lrmul=self.lrmul, name='dense_{:d}'.format(ii))
        for ii in range(self.n_mapping)]
    self.bias_layers = [
        Bias(lrmul=self.lrmul, name='bias_{:d}'.format(ii))
        for ii in range(self.n_mapping)]
    self.act_layers = [
        LeakyReLU(name='lrelu_{:d}'.format(ii))
        for ii in range(self.n_mapping)]
def __init__(self, fmaps, **kwargs):
    """Project an RGB input into ``fmaps`` feature channels (1x1 conv -> bias -> lrelu)."""
    super().__init__(**kwargs)
    self.fmaps = fmaps

    # Plain 1x1 convolution: no up/down resampling at the RGB boundary.
    conv_cfg = dict(fmaps=self.fmaps, kernel=1, gain=1.0, lrmul=1.0,
                    up=False, down=False, resample_kernel=None)
    self.conv = ResizeConv2D(name='conv', **conv_cfg)
    self.leaky_relu = LeakyReLU(name='lrelu')
    self.apply_bias = Bias(lrmul=1.0, name='bias')
def __init__(self, n_f0, n_f1, res, **kwargs):
    """Final discriminator block: minibatch-std -> 3x3 conv -> dense head.

    Args:
        n_f0: feature maps of the 3x3 convolution.
        n_f1: units of the dense layer.
        res: resolution this block operates at (the lowest, typically 4).
        **kwargs: forwarded to the base Keras layer (e.g. ``name``).
    """
    super().__init__(**kwargs)
    self.gain = 1.0
    self.lrmul = 1.0
    self.n_f0 = n_f0
    self.n_f1 = n_f1
    self.res = res

    # Minibatch standard-deviation feature appended to encourage sample variety.
    self.minibatch_std = MinibatchStd(group_size=4, num_new_features=1, name='minibatchstd')

    # conv_0 (no resampling)
    self.conv_0 = ResizeConv2D(fmaps=self.n_f0, kernel=3, gain=self.gain, lrmul=self.lrmul,
                               up=False, down=False, resample_kernel=None, name='conv_0')
    # lrmul passed by keyword for consistency with the other blocks in this file.
    self.apply_bias_0 = Bias(lrmul=self.lrmul, name='bias_0')
    self.leaky_relu_0 = LeakyReLU(name='lrelu_0')

    # dense_1
    self.dense_1 = Dense(self.n_f1, gain=self.gain, lrmul=self.lrmul, name='dense_1')
    self.apply_bias_1 = Bias(lrmul=self.lrmul, name='bias_1')
    self.leaky_relu_1 = LeakyReLU(name='lrelu_1')
def __init__(self, in_ch, **kwargs):
    """Map ``in_ch`` feature channels to a 3-channel RGB image.

    Uses a modulated 1x1 convolution with demodulation disabled (standard for
    StyleGAN2 ToRGB layers), followed by a bias.
    """
    super().__init__(**kwargs)
    self.in_ch = in_ch
    self.apply_bias = Bias(lrmul=1.0, name='bias')
    self.conv = FusedModConv(fmaps=3, kernel=1, gain=1.0, lrmul=1.0,
                             style_fmaps=self.in_ch, demodulate=False,
                             up=False, down=False, resample_kernel=None,
                             name='conv')
def __init__(self, in_ch, fmaps, res, **kwargs):
    """Synthesis block: an upsampling modulated conv followed by a same-resolution
    modulated conv, each with noise injection, bias, and leaky-relu."""
    super().__init__(**kwargs)
    self.in_ch = in_ch
    self.fmaps = fmaps
    self.res = res
    self.gain = 1.0
    self.lrmul = 1.0

    # Settings shared by both modulated convolutions.
    common = dict(fmaps=self.fmaps, kernel=3, gain=self.gain, lrmul=self.lrmul,
                  demodulate=True, down=False)

    # conv0: upsamples; styles sized by the incoming channel count.
    self.conv_0 = FusedModConv(style_fmaps=self.in_ch, up=True,
                               resample_kernel=[1, 3, 3, 1], name='conv_0', **common)
    self.apply_noise_0 = Noise(name='noise_0')
    self.apply_bias_0 = Bias(lrmul=self.lrmul, name='bias_0')
    self.leaky_relu_0 = LeakyReLU(name='lrelu_0')

    # conv1: keeps resolution; styles now sized by this block's feature maps.
    self.conv_1 = FusedModConv(style_fmaps=self.fmaps, up=False,
                               resample_kernel=[1, 3, 3, 1], name='conv_1', **common)
    self.apply_noise_1 = Noise(name='noise_1')
    self.apply_bias_1 = Bias(lrmul=self.lrmul, name='bias_1')
    self.leaky_relu_1 = LeakyReLU(name='lrelu_1')
def __init__(self, n_f0, n_f1, res, **kwargs):
    """Residual discriminator block: 3x3 conv, downsampling 3x3 conv, and a
    1x1 downsampling skip connection.

    Args:
        n_f0: feature maps of the first (same-resolution) convolution.
        n_f1: feature maps of the downsampling convolution and skip path.
        res: input resolution this block operates at.
        **kwargs: forwarded to the base Keras layer (e.g. ``name``).
    """
    super().__init__(**kwargs)
    self.gain = 1.0
    self.lrmul = 1.0
    self.n_f0 = n_f0
    self.n_f1 = n_f1
    self.res = res
    # 1/sqrt(2) keeps the variance of (main + skip) roughly unchanged.
    self.resnet_scale = 1. / np.sqrt(2.)

    # conv_0 (no resampling)
    self.conv_0 = ResizeConv2D(fmaps=self.n_f0, kernel=3, gain=self.gain, lrmul=self.lrmul,
                               up=False, down=False, resample_kernel=None, name='conv_0')
    # lrmul passed by keyword for consistency with the other blocks in this file.
    self.apply_bias_0 = Bias(lrmul=self.lrmul, name='bias_0')
    self.leaky_relu_0 = LeakyReLU(name='lrelu_0')

    # conv_1 down
    self.conv_1 = ResizeConv2D(fmaps=self.n_f1, kernel=3, gain=self.gain, lrmul=self.lrmul,
                               up=False, down=True, resample_kernel=[1, 3, 3, 1], name='conv_1')
    self.apply_bias_1 = Bias(lrmul=self.lrmul, name='bias_1')
    self.leaky_relu_1 = LeakyReLU(name='lrelu_1')

    # resnet skip path (no bias or activation)
    self.conv_skip = ResizeConv2D(fmaps=self.n_f1, kernel=1, gain=self.gain, lrmul=self.lrmul,
                                  up=False, down=True, resample_kernel=[1, 3, 3, 1], name='skip')
def __init__(self, fmaps, res, **kwargs):
    """Initial 4x4 synthesis block operating on the learned constant input.

    Args:
        fmaps: feature maps of the modulated convolution.
        res: must be 4 — this block only handles the base resolution.
        **kwargs: forwarded to the base Keras layer (e.g. ``name``).

    Raises:
        ValueError: if ``res`` is not 4.
    """
    super().__init__(**kwargs)
    # Explicit validation instead of `assert`, which is stripped under `python -O`.
    if res != 4:
        raise ValueError('SynthesisConstBlock requires res == 4, got {}'.format(res))
    self.res = res
    self.fmaps = fmaps
    self.gain = 1.0
    self.lrmul = 1.0

    # conv block: modulated 3x3 conv -> noise -> bias -> leaky-relu
    self.conv = FusedModConv(fmaps=self.fmaps, kernel=3, gain=self.gain, lrmul=self.lrmul,
                             style_fmaps=self.fmaps, demodulate=True,
                             up=False, down=False, resample_kernel=[1, 3, 3, 1], name='conv')
    self.apply_noise = Noise(name='noise')
    self.apply_bias = Bias(lrmul=self.lrmul, name='bias')
    self.leaky_relu = LeakyReLU(name='lrelu')
def __init__(self, d_params, **kwargs):
    """Assemble the full discriminator from ``d_params``.

    ``d_params['resolutions']`` and ``d_params['featuremaps']`` are listed in
    the generator's (low -> high) order, so both are reversed here to run the
    discriminator from the highest resolution down to the lowest.
    """
    super().__init__(**kwargs)
    self.labels_dim = d_params['labels_dim']
    self.r_resolutions = d_params['resolutions'][::-1]
    self.r_featuremaps = d_params['featuremaps'][::-1]

    # Input projection at the highest resolution.
    top_res, top_fmaps = self.r_resolutions[0], self.r_featuremaps[0]
    self.initial_fromrgb = FromRGB(fmaps=top_fmaps,
                                   name='{:d}x{:d}/FromRGB'.format(top_res, top_res))

    # Intermediate downsampling blocks: pair each resolution/featuremap with
    # the next featuremap count via a three-way zip.
    self.blocks = [
        DiscriminatorBlock(n_f0=f0, n_f1=f1, res=r, name='{:d}x{:d}'.format(r, r))
        for r, f0, f1 in zip(self.r_resolutions[:-1],
                             self.r_featuremaps[:-1],
                             self.r_featuremaps[1:])]

    # Final block at the lowest resolution.
    last_res = self.r_resolutions[-1]
    self.last_block = DiscriminatorLastBlock(self.r_featuremaps[-2], self.r_featuremaps[-1],
                                             last_res,
                                             name='{:d}x{:d}'.format(last_res, last_res))

    # Output head: one logit per label class (at least one when unconditional).
    self.last_dense = Dense(max(self.labels_dim, 1), gain=1.0, lrmul=1.0, name='last_dense')
    self.last_bias = Bias(lrmul=1.0, name='last_bias')