    def network(self, image, batch_size, update_collection):
        # ResNet discriminator: four stride-2 residual blocks followed by a
        # linear head producing `o_dim` features.
        from core.resnet import block, ops
        if self.format == 'NHWC':
            image = tf.transpose(image, [0, 3, 1, 2])  # NHWC to NCHW
        h0 = lrelu(
            ops.conv2d.Conv2D(self.prefix + 'h0_conv', 3, self.dim, 3, image))
        h1 = block.ResidualBlock(self.prefix + 'res1',
                                 self.dim,
                                 2 * self.dim,
                                 3,
                                 h0,
                                 resample='down')
        h2 = block.ResidualBlock(self.prefix + 'res2',
                                 2 * self.dim,
                                 4 * self.dim,
                                 3,
                                 h1,
                                 resample='down')
        h3 = block.ResidualBlock(self.prefix + 'res3',
                                 4 * self.dim,
                                 8 * self.dim,
                                 3,
                                 h2,
                                 resample='down')
        h4 = block.ResidualBlock(self.prefix + 'res4',
                                 8 * self.dim,
                                 8 * self.dim,
                                 3,
                                 h3,
                                 resample='down')
        # Flatten the 4x4 spatial maps left after four stride-2 downsamples.
        h4 = tf.reshape(h4, [-1, 4 * 4 * 8 * self.dim])
        hF = linear(h4, self.o_dim, self.prefix + 'h5_lin')
        return {'h0': h0, 'h1': h1, 'h2': h2, 'h3': h3, 'h4': h4, 'hF': hF}
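
# Note: the generator methods below call `conv_sizes(self.output_size,
# layers=5, stride=2)` to obtain the feature-map resolutions s1..s32. The
# real helper is defined elsewhere in the repository; the function below is
# only a sketch of its assumed behaviour (repeated stride-2 halving).
def _conv_sizes_sketch(output_size, layers=5, stride=2):
    # e.g. output_size=64 -> (64, 32, 16, 8, 4, 2)
    sizes = [output_size]
    for _ in range(layers):
        sizes.append(max(1, sizes[-1] // stride))
    return tuple(sizes)
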
    def network(self, seed, batch_size, update_collection):
        # ResNet generator: project the latent `seed`, then upsample through
        # four residual blocks to an output_size x output_size image.
        from core.resnet import block, ops
        s1, s2, s4, s8, s16, s32 = conv_sizes(self.output_size,
                                              layers=5,
                                              stride=2)
        # project `z` and reshape
        z_ = linear(seed, self.dim * 16 * s32 * s32, self.prefix + 'h0_lin')
        h0 = tf.reshape(z_, [-1, self.dim * 16, s32, s32])  # NCHW format

        h1 = block.ResidualBlock(self.prefix + 'res1',
                                 16 * self.dim,
                                 8 * self.dim,
                                 3,
                                 h0,
                                 resample='up')
        h2 = block.ResidualBlock(self.prefix + 'res2',
                                 8 * self.dim,
                                 4 * self.dim,
                                 3,
                                 h1,
                                 resample='up')
        h3 = block.ResidualBlock(self.prefix + 'res3',
                                 4 * self.dim,
                                 2 * self.dim,
                                 3,
                                 h2,
                                 resample='up')
        h4 = block.ResidualBlock(self.prefix + 'res4',
                                 2 * self.dim,
                                 self.dim,
                                 3,
                                 h3,
                                 resample='up')

        h4 = ops.batchnorm.Batchnorm('g_h4', [0, 2, 3], h4)
        h4 = tf.nn.relu(h4)
        if self.format == 'NHWC':
            h4 = tf.transpose(h4, [0, 2, 3, 1])  # NCHW to NHWC
        h5 = deconv2d(h4,
                      self.data_format(batch_size, s1, s1, self.c_dim),
                      name=self.prefix + 'g_h5')
        return tf.nn.sigmoid(h5)
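
# `block.ResidualBlock(..., resample='up')` above is defined in
# core/resnet/block.py. The sketch below is only an assumption of the usual
# pre-activation residual up-block used in SN-GAN style ResNet generators
# (BN -> ReLU -> nearest-neighbour x2 upsample -> 3x3 conv, twice, plus a
# 1x1-convolved upsampled shortcut); it uses NHWC and static shapes for
# simplicity and is not the repository's implementation.
import tensorflow as tf

def _residual_up_block_sketch(x, out_channels, name):
    with tf.variable_scope(name):
        def _upsample(t):
            # Nearest-neighbour x2 upsampling; assumes static spatial dims.
            height, width = t.get_shape().as_list()[1:3]
            return tf.image.resize_nearest_neighbor(t, [2 * height, 2 * width])

        shortcut = tf.layers.conv2d(_upsample(x), out_channels, 1,
                                    padding='same', name='shortcut')
        h = tf.nn.relu(tf.layers.batch_normalization(x, name='bn1'))
        h = tf.layers.conv2d(_upsample(h), out_channels, 3,
                             padding='same', name='conv1')
        h = tf.nn.relu(tf.layers.batch_normalization(h, name='bn2'))
        h = tf.layers.conv2d(h, out_channels, 3, padding='same', name='conv2')
        return shortcut + h
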
    def network(self, seed, y, batch_size, update_collection):
        # Class-conditional ResNet generator: the same upsampling stack as
        # above, with every residual block conditioned on `y` via conditional
        # batchnorm.
        from core.resnet import block, ops
        s1, s2, s4, s8, s16, s32 = conv_sizes(self.output_size,
                                              layers=5,
                                              stride=2)
        # project `z` and reshape
        if self.output_size == 64:
            s32 = 4

        z_ = linear(seed, self.dim * 16 * s32 * s32, self.prefix + 'h0_lin')
        h0 = tf.reshape(z_, [-1, self.dim * 16, s32, s32])  # NCHW format
        if self.output_size == 64:
            h0_bis = h0
        else:
            h0_bis = block.ResidualBlock(self.prefix + 'res0_bis',
                                         16 * self.dim,
                                         16 * self.dim,
                                         3,
                                         h0,
                                         y=y,
                                         num_classes=self.num_classes,
                                         resample='up',
                                         mode='cond_batchnorm')
        h1 = block.ResidualBlock(self.prefix + 'res1',
                                 16 * self.dim,
                                 8 * self.dim,
                                 3,
                                 h0_bis,
                                 y=y,
                                 num_classes=self.num_classes,
                                 resample='up',
                                 mode='cond_batchnorm')
        h2 = block.ResidualBlock(self.prefix + 'res2',
                                 8 * self.dim,
                                 4 * self.dim,
                                 3,
                                 h1,
                                 y=y,
                                 num_classes=self.num_classes,
                                 resample='up',
                                 mode='cond_batchnorm')
        h3 = block.ResidualBlock(self.prefix + 'res3',
                                 4 * self.dim,
                                 2 * self.dim,
                                 3,
                                 h2,
                                 y=y,
                                 num_classes=self.num_classes,
                                 resample='up',
                                 mode='cond_batchnorm')
        h4 = block.ResidualBlock(self.prefix + 'res4',
                                 2 * self.dim,
                                 self.dim,
                                 3,
                                 h3,
                                 y=y,
                                 num_classes=self.num_classes,
                                 resample='up',
                                 mode='cond_batchnorm')

        h4 = ops.batchnorm.Batchnorm('g_h4', [0, 2, 3], h4)
        h4 = tf.nn.relu(h4)
        if self.format == 'NHWC':
            h4 = tf.transpose(h4, [0, 2, 3, 1])  # NCHW to NHWC
        h5 = deconv2d(h4,
                      self.data_format(batch_size, s1, s1, self.c_dim),
                      k_h=3,
                      k_w=3,
                      d_h=1,
                      d_w=1,
                      name=self.prefix + 'g_h5')
        return tf.nn.sigmoid(h5)
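
# The conditional generator above passes `y` and `num_classes` with
# `mode='cond_batchnorm'`. The sketch below is an assumption of what
# class-conditional batch normalization typically does: normalize, then apply
# a per-class scale and offset looked up from the label `y`. It is not the
# repository's implementation.
import tensorflow as tf

def _cond_batchnorm_sketch(x, y, num_classes, name, eps=1e-5):
    # x: [N, C, H, W] activations (NCHW); y: [N] integer class labels.
    with tf.variable_scope(name):
        channels = x.get_shape().as_list()[1]
        mean, var = tf.nn.moments(x, axes=[0, 2, 3], keep_dims=True)
        x_hat = (x - mean) / tf.sqrt(var + eps)
        gamma = tf.get_variable('gamma', [num_classes, channels],
                                initializer=tf.ones_initializer())
        beta = tf.get_variable('beta', [num_classes, channels],
                               initializer=tf.zeros_initializer())
        g = tf.reshape(tf.gather(gamma, y), [-1, channels, 1, 1])
        b = tf.reshape(tf.gather(beta, y), [-1, channels, 1, 1])
        return g * x_hat + b
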
    def network(self, image, batch_size, update_collection, y):
        # ResNet discriminator with optional spectral normalization and,
        # when `y` is given, projection-style label conditioning.
        from core.resnet import block, ops
        if self.format == 'NHWC':
            image = tf.transpose(image, [0, 3, 1, 2])  # NHWC to NCHW
        h0 = lrelu(
            ops.conv2d.Conv2D(
                self.prefix + 'h0_conv',
                3,
                self.dim,
                3,
                image,
                update_collection=update_collection,
                with_sn=self.with_sn,
                with_learnable_sn_scale=self.with_learnable_sn_scale))
        h1 = block.ResidualBlock(
            self.prefix + 'res1',
            self.dim,
            2 * self.dim,
            3,
            h0,
            resample='down',
            update_collection=update_collection,
            with_sn=self.with_sn,
            with_learnable_sn_scale=self.with_learnable_sn_scale)
        h2 = block.ResidualBlock(
            self.prefix + 'res2',
            2 * self.dim,
            4 * self.dim,
            3,
            h1,
            resample='down',
            update_collection=update_collection,
            with_sn=self.with_sn,
            with_learnable_sn_scale=self.with_learnable_sn_scale)
        h3 = block.ResidualBlock(
            self.prefix + 'res3',
            4 * self.dim,
            8 * self.dim,
            3,
            h2,
            resample='down',
            update_collection=update_collection,
            with_sn=self.with_sn,
            with_learnable_sn_scale=self.with_learnable_sn_scale)
        h4 = block.ResidualBlock(
            self.prefix + 'res4',
            8 * self.dim,
            16 * self.dim,
            3,
            h3,
            resample='down',
            update_collection=update_collection,
            with_sn=self.with_sn,
            with_learnable_sn_scale=self.with_learnable_sn_scale)
        if image.get_shape().as_list()[2] == 64:
            h4_bis = h4
        else:
            h4_bis = block.ResidualBlock(
                self.prefix + 'res4_bis',
                16 * self.dim,
                16 * self.dim,
                3,
                h4,
                resample=None,
                update_collection=update_collection,
                with_sn=self.with_sn,
                with_learnable_sn_scale=self.with_learnable_sn_scale)

        h4_bis = lrelu(h4_bis)
        h4_bis = tf.reduce_sum(h4_bis, axis=[2, 3])
        hF = linear(h4_bis,
                    self.o_dim,
                    self.prefix + 'h5_lin',
                    update_collection=update_collection,
                    with_sn=self.with_sn,
                    with_learnable_sn_scale=self.with_learnable_sn_scale)
        if y is not None:
            # Projection-style conditioning: embed the label `y` and add its
            # inner product with `hF` to the output.
            w_y = linear_one_hot(
                y,
                self.o_dim,
                self.num_classes,
                name=self.prefix + "Linear_one_hot",
                update_collection=update_collection,
                with_sn=self.with_sn,
                with_learnable_sn_scale=self.with_learnable_sn_scale)

            hF += tf.reduce_sum(w_y * hF, axis=1, keepdims=True)

        return {'h0': h0, 'h1': h1, 'h2': h2, 'h3': h3, 'h4': h4, 'hF': hF}
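
# The discriminator above threads `with_sn`, `with_learnable_sn_scale` and
# `update_collection` through every layer, which in SN-GAN style code means
# spectral normalization of the weights via power iteration. A minimal sketch
# under that assumption (the real implementation lives in core/resnet/ops):
import tensorflow as tf

def _spectral_norm_sketch(w, name='u'):
    # w: a 2-D weight matrix [in_dim, out_dim]; one power-iteration step.
    u = tf.get_variable(name, [1, w.get_shape().as_list()[1]],
                        initializer=tf.truncated_normal_initializer(),
                        trainable=False)
    v = tf.nn.l2_normalize(tf.matmul(u, tf.transpose(w)), 1)
    u_new = tf.nn.l2_normalize(tf.matmul(v, w), 1)
    sigma = tf.matmul(tf.matmul(v, w), tf.transpose(u_new))  # spectral-norm estimate
    # Update the persistent `u` vector, then divide the weight by sigma.
    with tf.control_dependencies([u.assign(u_new)]):
        return w / sigma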