def __init__(self, config, args):
        self.config = config
        for k, v in args.__dict__.items():
            setattr(self.config, k, v)
        setattr(self.config, 'save_dir', '{}_log'.format(self.config.dataset))

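        # Log every (non-dunder) config attribute, shortest names first, to stdout and later to the log file.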
        disp_str = ''
        for attr in sorted(dir(self.config), key=lambda x: len(x)):
            if not attr.startswith('__'):
                disp_str += '{} : {}\n'.format(attr,
                                               getattr(self.config, attr))
        sys.stdout.write(disp_str)
        sys.stdout.flush()

        self.labeled_loader, self.unlabeled_loader, self.dev_loader, self.special_set = data.get_cifar_loaders(
            config)

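        # Networks: the discriminator, an EMA copy of it (mean-teacher style), the generator and the encoder, all on GPU.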
        self.dis = model.Discriminative(config).cuda()
        self.ema_dis = model.Discriminative(config, ema=True).cuda()
        self.gen = model.Generator(image_size=config.image_size,
                                   noise_size=config.noise_size).cuda()
        self.enc = model.Encoder(config.image_size,
                                 noise_size=config.noise_size,
                                 output_params=True).cuda()

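        # Optimizers: SGD with Nesterov momentum for the discriminator; Adam for the generator and the encoder.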
        # self.dis_optimizer = optim.Adam(self.dis.parameters(), lr=config.dis_lr, betas=(0.5, 0.999))
        self.dis_optimizer = optim.SGD(self.dis.parameters(),
                                       lr=config.dis_lr,
                                       momentum=config.momentum,
                                       weight_decay=config.weight_decay,
                                       nesterov=config.nesterov)
        self.gen_optimizer = optim.Adam(self.gen.parameters(),
                                        lr=config.gen_lr,
                                        betas=(0.0, 0.999))
        self.enc_optimizer = optim.Adam(self.enc.parameters(),
                                        lr=config.enc_lr,
                                        betas=(0.0, 0.999))

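        # Losses: cross-entropy on labeled data plus a consistency term (softmax MSE or softmax KL);
        # consistency_weight starts at 0 and is presumably ramped up elsewhere during training.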
        self.d_criterion = nn.CrossEntropyLoss()
        if config.consistency_type == 'mse':
            self.consistency_criterion = losses.softmax_mse_loss  # nn.MSELoss()    # (size_average=False)
        elif config.consistency_type == 'kl':
            self.consistency_criterion = losses.softmax_kl_loss  # nn.KLDivLoss()  # (size_average=False)
        else:
            pass  # any other value leaves consistency_criterion unset
        self.consistency_weight = 0

        if not os.path.exists(self.config.save_dir):
            os.makedirs(self.config.save_dir)

        if self.config.resume:
            pass  # resume handling is a no-op in this variant

        log_path = os.path.join(
            self.config.save_dir,
            '{}.FM+VI.{}.txt'.format(self.config.dataset, self.config.suffix))
        self.logger = open(log_path, 'w')
        self.logger.write(disp_str)

        print(self.dis)
    def __init__(self, config, args):
        self.config = config
        for k, v in list(args.__dict__.items()):
            setattr(self.config, k, v)
        setattr(self.config, 'save_dir', '{}_log'.format(self.config.dataset))

        disp_str = ''
        for attr in sorted(dir(self.config), key=lambda x: len(x)):
            if not attr.startswith('__'):
                disp_str += '{} : {}\n'.format(attr, getattr(self.config, attr))
        sys.stdout.write(disp_str)
        sys.stdout.flush()

        self.labeled_loader, self.unlabeled_loader, self.unlabeled_loader2, self.dev_loader, self.special_set = data.get_cifar_loaders(config)

        self.dis = model.Discriminative(config).cuda()
        self.gen = model.Generator(image_size=config.image_size, noise_size=config.noise_size).cuda()
        self.enc = model.Encoder(config.image_size, noise_size=config.noise_size, output_params=True).cuda()

        self.dis_optimizer = optim.Adam(self.dis.parameters(), lr=config.dis_lr, betas=(0.5, 0.999))
        self.gen_optimizer = optim.Adam(self.gen.parameters(), lr=config.gen_lr, betas=(0.0, 0.999))
        self.enc_optimizer = optim.Adam(self.enc.parameters(), lr=config.enc_lr, betas=(0.0, 0.999))

        self.d_criterion = nn.CrossEntropyLoss()

        if not os.path.exists(self.config.save_dir):
            os.makedirs(self.config.save_dir)

        log_path = os.path.join(self.config.save_dir, '{}.FM+VI.{}.txt'.format(self.config.dataset, self.config.suffix))
        self.logger = open(log_path, 'w')
        self.logger.write(disp_str)

        print(self.dis)
    def __init__(self, config, args):
        self.config = config
        for k, v in args.__dict__.items():
            setattr(self.config, k, v)
        setattr(self.config, 'save_dir', '{}_log'.format(self.config.dataset))

        disp_str = ''
        for attr in sorted(dir(self.config), key=lambda x: len(x)):
            if not attr.startswith('__'):
                disp_str += '{} : {}\n'.format(attr,
                                               getattr(self.config, attr))
        sys.stdout.write(disp_str)
        sys.stdout.flush()

        self.labeled_loader, self.unlabeled_loader, self.unlabeled_loader2, self.dev_loader, self.special_set = data.get_cifar_loaders(
            config)

        self.dis = model.Discriminative(config).cuda()
        self.gen = model.Generator(image_size=config.image_size,
                                   noise_size=config.noise_size).cuda()
        self.enc = model.Encoder(config.image_size,
                                 noise_size=config.noise_size,
                                 output_params=True).cuda()

        # Load pretrained weights for D, G and E (strict=False so missing/unexpected keys are ignored).
        self.load_network(self.dis, 'D', strict=False)
        self.load_network(self.gen, 'G', strict=False)
        self.load_network(self.enc, 'E', strict=False)

        if not os.path.exists(self.config.save_dir):
            os.makedirs(self.config.save_dir)

        log_path = os.path.join(
            self.config.save_dir,
            '{}.FM+VI.{}.txt'.format(self.config.dataset, self.config.suffix))
        self.logger = open(log_path, 'w')
        self.logger.write(disp_str)

        print(self.dis)
    def __init__(self, config, args):
        self.config = config
        for k, v in args.__dict__.items():
            setattr(self.config, k, v)
        setattr(self.config, 'save_dir', '{}_log'.format(self.config.dataset))

        disp_str = ''
        for attr in sorted(dir(self.config), key=lambda x: len(x)):
            if not attr.startswith('__'):
                disp_str += '{} : {}\n'.format(attr,
                                               getattr(self.config, attr))
        sys.stdout.write(disp_str)
        sys.stdout.flush()

        self.labeled_loader, self.unlabeled_loader, self.dev_loader, self.special_set = data.get_cifar_loaders(
            config)

        self.dis = model.Discriminative(config).cuda()
        self.ema_dis = model.Discriminative(
            config).cuda()  # , ema=True).cuda()
        # for param in self.ema_dis.parameters():
        #     param.detach_()
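        # (A mean-teacher setup would typically detach these parameters and update them as an EMA of
        # self.dis; both the ema flag and the detach loop are left disabled in this variant.)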
        if config.gen_mode != "non":
            self.gen = model.generator(image_side=config.image_side,
                                       noise_size=config.noise_size,
                                       large=config.double_input_size,
                                       gen_mode=config.gen_mode).cuda()

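        # Discriminator parameter groups; the extra output head's parameters are appended below when dis_double is set.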
        dis_para = [
            {
                'params': self.dis.parameters()
            },
        ]
        if 'm' in config.dis_mode:  # svhn: 168; cifar:192
            self.m_criterion = FocalLoss(gamma=2)

        if config.dis_double:
            self.dis_dou = model.Discriminative_out(config).cuda()
            dis_para.append({'params': self.dis_dou.parameters()})

        self.dis_optimizer = optim.Adam(dis_para,
                                        lr=config.dis_lr,
                                        betas=(0.5, 0.999))
        # self.dis_optimizer = optim.SGD(self.dis.parameters(), lr=config.dis_lr,
        #                                momentum=config.momentum,
        #                                weight_decay=config.weight_decay,
        #                                nesterov=config.nesterov)
        if hasattr(self, 'gen'):
            if config.gop == 'SGD':
                self.gen_optimizer = optim.SGD(
                    self.gen.parameters(),
                    lr=config.gen_lr,
                    momentum=config.momentum,
                    weight_decay=config.weight_decay,
                    nesterov=config.nesterov)
            else:
                self.gen_optimizer = optim.Adam(self.gen.parameters(),
                                                lr=config.gen_lr,
                                                betas=(0.0, 0.999))
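        # The "z2i" generator mode also needs an encoder (image -> latent) with its own Adam optimizer.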
        if config.gen_mode == "z2i":
            self.enc = model.Encoder(config.image_side,
                                     noise_size=config.noise_size,
                                     output_params=True).cuda()
            self.enc_optimizer = optim.Adam(self.enc.parameters(),
                                            lr=config.enc_lr,
                                            betas=(0.0, 0.999))

        self.d_criterion = nn.CrossEntropyLoss()
        if config.consistency_type == 'mse':
            self.consistency_criterion = losses.softmax_mse_loss  # nn.MSELoss()    # (size_average=False)
        elif config.consistency_type == 'kl':
            self.consistency_criterion = losses.softmax_kl_loss  # nn.KLDivLoss()  # (size_average=False)
        else:
            pass  # any other value leaves consistency_criterion unset
        self.consistency_weight = 0

        if not os.path.exists(self.config.save_dir):
            os.makedirs(self.config.save_dir)

        if "," in config.dis_mode or config.cd_mode_iter > 0:
            assert "," in config.dis_mode
            assert config.cd_mode_iter > 0
            self.dis_mode = config.dis_mode
            config.dis_mode = config.dis_mode.split(",")[0]

        log_path = os.path.join(
            self.config.save_dir,
            '{}.FM+VI.{}.txt'.format(self.config.dataset, self.config.suffix))
        if config.resume:
            self.logger = open(log_path, 'a')
        else:
            self.logger = open(log_path, 'w')
            self.logger.write(disp_str)

        # ArcFace-style additive angular margin: s is the feature scale, m the margin;
        # cos_m, sin_m, th and mm are precomputed for the cos(theta + m) logit adjustment.
        self.s = 30.0
        m = 0.50
        self.cos_m = math.cos(m)
        self.sin_m = math.sin(m)
        self.th = math.cos(math.pi - m)
        self.mm = math.sin(math.pi - m) * m
        # dg start epoch: dg_flag stays off until epoch dg_start is reached (on immediately if dg_start <= 0).
        if config.dg_start > 0:
            self.dg_flag = False
        else:
            self.dg_flag = True

        print(self.dis)