# The examples below use Chainer; SNGAN and AdaGenerator are defined in the
# surrounding project and are not reproduced here.
import numpy as np
import chainer.functions as F
import chainer.links as L


class AdaSNGAN(AdaGenerator):  # class name from the super() call below; base class assumed
    def __init__(self,
                 config,
                 batchsize,
                 n_classes=0,
                 softmax=True,
                 comm=None,
                 test=False):
        self.softmax = softmax
        if not test:
            self.l_emd = config.l_emd
            self.l_re = config.l_re
            self.l_patch_dis = config.l_patch_dis
            self.l_gp = config.l_gp
            self.l_per = config.l_per
            self.comm = comm
        self.config = config
        self.normalize_stat = getattr(self.config, "normalize_stat", False)
        if self.normalize_stat:
            self.l_re = 0

        self.batchsize = batchsize

        self.gen = SNGAN(n_classes=n_classes,
                         normalize_stat=self.normalize_stat)
        self.gen.initialize_params()  # initialize gamma and beta

        self.dim_z = 128
        params = {}
        if config.initial_z == "zero":
            params["z"] = L.Parameter(
                np.zeros((batchsize, self.dim_z)).astype("float32"))
        elif config.initial_z == "random":
            params["z"] = L.Parameter(
                np.random.normal(size=(batchsize,
                                       self.dim_z)).astype("float32"))

        gamma = self.gen.get_gamma()
        beta = self.gen.get_beta()

        for i in range(len(gamma)):
            if not config.not_initial_gamma:
                params[f"gamma{i + 1}"] = gamma[i]
            if not config.not_initial_beta:
                params[f"beta{i + 1}"] = beta[i]
        # keep direct references to the parameter Variables of each gamma/beta link
        self.gamma = [g.W for g in gamma]
        self.beta = [b.W for b in beta]

        if config.lr_g_linear > 0:
            params["g_linear"] = self.gen.l1
        super(AdaSNGAN, self).__init__(**params)
        if not test:
            self.setup_optimizer(config.init_lr)
            if config.lr_g_linear > 0:
                self.g_linear.W.update_rule.hyperparam.alpha = config.lr_g_linear
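
The last lines above rely on Chainer's per-parameter update rules: once an optimizer has been set up, every parameter carries its own update_rule whose hyperparameters can be overridden independently of the global ones. A minimal standalone sketch of that mechanism (plain Chainer with a toy Linear link and Adam; not taken from this repository):

import chainer.links as L
from chainer import optimizers

link = L.Linear(4, 2)
opt = optimizers.Adam(alpha=1e-3)            # global learning rate
opt.setup(link)                              # attaches an update_rule to link.W and link.b
link.W.update_rule.hyperparam.alpha = 1e-4   # per-parameter override, as done for g_linear above
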
Example #2

class AdaSNGAN(AdaGenerator):
    def __init__(self, config, batchsize, comm=None, test=False):
        if not test:
            # loss weights and the communicator are only needed during training
            self.l_emd = config.l_emd
            self.l_re = config.l_re
            self.l_patch_dis = config.l_patch_dis
            self.l_gp = config.l_gp
            self.l_per = config.l_per
            self.comm = comm
        self.config = config
        self.normalize_stat = getattr(self.config, "normalize_stat", False)
        if self.normalize_stat:
            # drop the l_re term when batch statistics are normalized
            self.l_re = 0

        self.batchsize = batchsize

        self.gen = SNGAN(n_classes=config.n_classes,
                         normalize_stat=self.normalize_stat)
        self.gen.initialize_params()  # initialize gamma and beta

        self.dim_z = 128
        params = {}
        # one trainable latent code per element of the batch, initialized
        # either to zeros or to Gaussian noise depending on config.initial_z
        if config.initial_z == "zero":
            params["z"] = L.Parameter(
                np.zeros((batchsize, self.dim_z)).astype("float32"))
        elif config.initial_z == "random":
            params["z"] = L.Parameter(
                np.random.normal(size=(batchsize,
                                       self.dim_z)).astype("float32"))

        gamma = self.gen.get_gamma()
        beta = self.gen.get_beta()

        # register the per-layer scale (gamma) and shift (beta) parameters as
        # trainable links unless disabled in the config
        for i in range(len(gamma)):
            if not config.not_initial_gamma:
                params[f"gamma{i + 1}"] = gamma[i]
            if not config.not_initial_beta:
                params[f"beta{i + 1}"] = beta[i]
        # keep direct references to the parameter Variables of each gamma/beta link
        self.gamma = [g.W for g in gamma]
        self.beta = [b.W for b in beta]

        if config.lr_g_linear > 0:
            # also adapt the generator's linear layer
            params["g_linear"] = self.gen.l1
        super(AdaSNGAN, self).__init__(**params)
        if not test:
            self.setup_optimizer(config.init_lr)
            if config.lr_g_linear > 0:
                # give the linear layer its own learning rate, independent of init_lr
                self.g_linear.W.update_rule.hyperparam.alpha = config.lr_g_linear

    def forward(self, z):
        # generate a batch of images from the given latent codes, feeding the
        # adapted gamma/beta into the generator's batch-normalization layers
        h = self.gen(z.shape[0], z=z, gamma=self.gamma, beta=self.beta)
        return h

    def bs_reg(self):
        # batch-statistics regularizer: penalize the adapted scale parameters
        # for drifting away from 1 and the shift parameters away from 0
        xp = self.xp
        bs_re = 0
        for g in self.gamma:
            bs_re += F.mean_squared_error(g, xp.ones(g.shape, dtype="float32"))
        for b in self.beta:
            bs_re += F.mean_squared_error(b, xp.zeros(b.shape, dtype="float32"))
        return bs_re
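
For illustration, the regularizer above can be exercised on toy tensors without the rest of the model; the shapes below are made up and F.mean_squared_error accepts plain NumPy arrays:

import numpy as np
import chainer.functions as F

# hypothetical stand-ins for the adapted per-layer scale/shift parameters
gamma = [np.random.normal(1.0, 0.1, size=(1, c)).astype("float32") for c in (64, 128)]
beta = [np.random.normal(0.0, 0.1, size=(1, c)).astype("float32") for c in (64, 128)]

bs_re = 0
for g in gamma:
    bs_re += F.mean_squared_error(g, np.ones(g.shape, dtype="float32"))
for b in beta:
    bs_re += F.mean_squared_error(b, np.zeros(b.shape, dtype="float32"))
print(bs_re.array)  # small positive value; exactly 0 only when every gamma is 1 and every beta is 0

In training code this term would presumably be scaled by l_re and added to the adaptation loss, which is consistent with l_re being zeroed when normalize_stat is set.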