def last_cnn(self, h, resolution, ch_in, ch_out):
    """Final discriminator block.

    Runs a padded 3x3 convolution and an unpadded 4x4 convolution — each
    followed by optional layer normalization and the activation — then an
    affine layer that maps the features to a single scalar output.

    Args:
        h: Input variable.
        resolution: Resolution tag used to name the parameter scope.
        ch_in: Channel count for the first convolution.
        ch_out: Channel count for the second convolution.

    Returns:
        Output variable of the affine head (one unit).
    """
    # Shared keyword options for both convolutions; bias is folded into LN
    # when layer normalization is enabled.
    conv_opts = dict(
        with_bias=not self.use_ln,
        use_wscale=self.use_wscale,
        use_he_backward=self.use_he_backward,
    )
    with nn.parameter_scope("phase_{}".format(resolution)):
        with nn.parameter_scope("conv1"):
            out = conv(h, ch_in, kernel=(3, 3), pad=(1, 1), stride=(1, 1),
                       **conv_opts)
            out = self.activation(LN(out, use_ln=self.use_ln))
        with nn.parameter_scope("conv2"):
            # 4x4 kernel with no padding collapses the remaining spatial extent.
            out = conv(out, ch_out, kernel=(4, 4), pad=(0, 0), stride=(1, 1),
                       **conv_opts)
            out = self.activation(LN(out, use_ln=self.use_ln))
        with nn.parameter_scope("linear"):
            out = affine(out, 1, with_bias=True,
                         use_wscale=self.use_wscale,
                         use_he_backward=self.use_he_backward)
    return out
def cnn(self, h, resolution, ch_in, ch_out):
    """Intermediate CNN block.

    Applies (Conv 3x3 -> layer normalization -> activation) twice, then
    halves the spatial resolution with 2x2 average pooling.

    Args:
        h: Input variable.
        resolution: Resolution tag used to name the parameter scope.
        ch_in: Channel count for the first convolution.
        ch_out: Channel count for the second convolution.

    Returns:
        Output variable after pooling.
    """
    # Both convolutions share these options; bias is omitted when layer
    # normalization supplies its own shift.
    conv_opts = dict(
        kernel=(3, 3),
        pad=(1, 1),
        stride=(1, 1),
        with_bias=not self.use_ln,
        use_wscale=self.use_wscale,
        use_he_backward=self.use_he_backward,
    )
    with nn.parameter_scope("phase_{}".format(resolution)):
        with nn.parameter_scope("conv1"):
            out = conv(h, ch_in, **conv_opts)
            out = self.activation(LN(out, use_ln=self.use_ln))
        with nn.parameter_scope("conv2"):
            out = conv(out, ch_out, **conv_opts)
            out = self.activation(LN(out, use_ln=self.use_ln))
        # Downsample by a factor of two.
        out = F.average_pooling(out, kernel=(2, 2))
    return out