Example 1: generator
    def __init__(self, x_shape, z_dim, batch_norm=True):
        super().__init__()
        ch = x_shape[-1]
        self.net = ts.Sequential(
            dense(128),
            ts.ReLU(),
            dense(4 * 4 * 64),
            ts.ReLU(),
            ts.Reshape((-1, 4, 4, 64)),
            deconv(64, 4, 2, "same"),
            ts.LeakyReLU(),
            deconv(32, 4, 2, "same"),
            ts.LeakyReLU(),
            deconv(32, 4, 2, "same"),
            ts.LeakyReLU(),
            deconv(ch, 4, 2, "same"),
            ts.Sigmoid(),
        )

        # Add batchnorm post-activation (attach to activation out_hook)
        if batch_norm:
            self.net.apply(add_bn, targets=(ts.ReLU, ts.LeakyReLU))

        ut.log("Building generator...")
        self.build((1, z_dim))
        self.apply(ut.reset_parameters)
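
A quick sanity check of the shapes this generator implies: the dense stack ends in a 4 * 4 * 64 projection, ts.Reshape turns it into a 4x4 feature map, and each of the four stride-2 "same" transposed convolutions doubles the spatial resolution, so the output is presumably a 64x64 image with ch = x_shape[-1] channels, squashed into [0, 1] by the final sigmoid. A minimal sketch of that arithmetic (plain Python, independent of the ts helpers above):

    # Spatial-size progression of the generator's upsampling stack.
    size = 4                   # after ts.Reshape((-1, 4, 4, 64))
    for _ in range(4):         # four deconv(..., 4, 2, "same") layers
        size *= 2              # a stride-2 "same" deconv doubles height/width
    assert size == 64          # so x_shape is expected to describe 64x64 images
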
Example 2: label discriminator
  def __init__(self, x_shape, y_dim, width=1, share_dense=False,
               uncond_bias=False):
    super().__init__()
    self.y_dim = y_dim
    self.body = ts.Sequential(
        conv(32 * width, 4, 2, "same"), ts.LeakyReLU(),
        conv(32 * width, 4, 2, "same"), ts.LeakyReLU(),
        conv(64 * width, 4, 2, "same"), ts.LeakyReLU(),
        conv(64 * width, 4, 2, "same"), ts.LeakyReLU(),
        ts.Flatten(),
        )

    self.aux = ts.Sequential(
        dense(128 * width), ts.LeakyReLU(),
        )

    if share_dense:
      self.body.append(dense(128 * width), ts.LeakyReLU())
      self.aux.append(dense(128 * width), ts.LeakyReLU())

    self.head = ts.Sequential(
        dense(128 * width), ts.LeakyReLU(),
        dense(128 * width), ts.LeakyReLU(),
        dense(1, bias=uncond_bias)
        )

    for m in (self.body, self.aux, self.head):
      m.apply(ts.SpectralNorm.add, targets=ts.Affine)

    ut.log("Building label discriminator...")
    x_shape, y_shape = [1] + x_shape, (1, y_dim)
    self.build(x_shape, y_shape)
    self.apply(ut.reset_parameters)
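
Every Affine layer in body, aux, and head is wrapped with ts.SpectralNorm.add, which, in the spirit of standard spectral normalization (Miyato et al., 2018), presumably rescales each weight matrix so its largest singular value stays near 1 and thereby bounds the discriminator's Lipschitz constant. A minimal NumPy sketch of that idea using power iteration; this illustrates the technique, not the ts implementation:

    import numpy as np

    def spectral_normalize(w, n_iter=5):
        # Estimate the largest singular value of w by power iteration,
        # then divide it out so the result has spectral norm close to 1.
        u = np.random.randn(w.shape[0])
        for _ in range(n_iter):
            v = w.T @ u
            v = v / (np.linalg.norm(v) + 1e-12)
            u = w @ v
            u = u / (np.linalg.norm(u) + 1e-12)
        sigma = u @ w @ v
        return w / sigma

    w_sn = spectral_normalize(np.random.randn(64, 128))
    print(np.linalg.svd(w_sn, compute_uv=False)[0])  # approximately 1.0
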
Example 3: discriminator (mask_type "match" or "rank")
    def __init__(self,
                 x_shape,
                 y_dim,
                 width=1,
                 share_dense=False,
                 uncond_bias=False,
                 cond_bias=False,
                 mask_type="match"):
        super().__init__()
        self.y_dim = y_dim
        self.mask_type = mask_type
        self.body = ts.Sequential(
            conv(32 * width, 4, 2, "same"),
            ts.LeakyReLU(),
            conv(32 * width, 4, 2, "same"),
            ts.LeakyReLU(),
            conv(64 * width, 4, 2, "same"),
            ts.LeakyReLU(),
            conv(64 * width, 4, 2, "same"),
            ts.LeakyReLU(),
            ts.Flatten(),
        )

        if share_dense:
            self.body.append(dense(128 * width), ts.LeakyReLU())

        if mask_type == "match":
            self.neck = ts.Sequential(
                dense(128 * width),
                ts.LeakyReLU(),
                dense(128 * width),
                ts.LeakyReLU(),
            )

            self.head_uncond = dense(1, bias=uncond_bias)
            self.head_cond = dense(128 * width, bias=cond_bias)

            for m in (self.body, self.neck, self.head_uncond):
                m.apply(ts.SpectralNorm.add, targets=ts.Affine)
            add_wn(self.head_cond)
            # In "match" mode the label input is a single int32 index.
            x_shape, y_shape = [1] + x_shape, ((1, ), tf.int32)

        elif mask_type == "rank":
            self.body.append(dense(128 * width), ts.LeakyReLU(),
                             dense(128 * width), ts.LeakyReLU(),
                             dense(1 + y_dim, bias=uncond_bias))

            self.body.apply(ts.SpectralNorm.add, targets=ts.Affine)
            # In "rank" mode the label input is a length-y_dim vector.
            x_shape, y_shape = [1] + x_shape, (1, y_dim)

        ut.log("Building {} discriminator...".format(mask_type))
        self.build(x_shape, x_shape, y_shape)
        self.apply(ut.reset_parameters)
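
The two mask types differ in how the label enters. In "match" mode the label is a single int32 index and the network keeps a one-logit unconditional head plus a weight-normalized conditional head producing a 128 * width vector; in "rank" mode the label is a y_dim vector and the body itself ends in 1 + y_dim outputs. The forward pass is not shown here, but a conditional head of the "match" shape is commonly combined with the shared features in the projection-discriminator style (Miyato & Koyama, 2018); the sketch below illustrates that pattern under this assumption, with random arrays standing in for the real layers:

    import numpy as np

    batch, width = 8, 1
    h = np.random.randn(batch, 128 * width)      # stands in for the shared neck features
    w_uncond = np.random.randn(128 * width, 1)   # stands in for head_uncond's weights
    v_cond = np.random.randn(128 * width)        # stands in for head_cond applied to the label index

    # Projection-style conditional logit: unconditional score plus the
    # inner product between the shared features and a label embedding.
    logit = h @ w_uncond + (h * v_cond).sum(axis=-1, keepdims=True)
    print(logit.shape)  # (8, 1)
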
Example 4: encoder
  def __init__(self, x_shape, z_dim, width=1, spectral_norm=True):
    super().__init__()
    self.net = ts.Sequential(
        conv(32 * width, 4, 2, "same"), ts.LeakyReLU(),
        conv(32 * width, 4, 2, "same"), ts.LeakyReLU(),
        conv(64 * width, 4, 2, "same"), ts.LeakyReLU(),
        conv(64 * width, 4, 2, "same"), ts.LeakyReLU(),
        ts.Flatten(),
        dense(128 * width), ts.LeakyReLU(),
        dense(2 * z_dim)
        )

    if spectral_norm:
      self.net.apply(ts.SpectralNorm.add, targets=ts.Affine)

    ut.log("Building encoder...")
    self.build([1] + x_shape)
    self.apply(ut.reset_parameters)
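
The final dense(2 * z_dim) layer typically parameterizes a diagonal Gaussian over the latent code, with the first z_dim outputs taken as the mean and the remaining z_dim as a (log-)scale. How the output is actually consumed is not shown in this snippet, so the split below is an assumption used only for illustration:

    import numpy as np

    z_dim, batch = 10, 4
    out = np.random.randn(batch, 2 * z_dim)      # stands in for self.net(x)
    mean, log_scale = out[:, :z_dim], out[:, z_dim:]

    # Reparameterized sample from the implied diagonal Gaussian posterior.
    z = mean + np.exp(log_scale) * np.random.randn(batch, z_dim)
    print(z.shape)  # (4, 10)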