def __init__(self, config):
	# Build one encoder, one generator and two discriminators.
	# NOTE: no ImagePool is used here; it may be added later.
	z_dim = int(config['z_dim'])

	# Scalar hyper-parameters pulled from the config mapping.
	self.lr = float(config['lr'])
	self.beta1 = float(config['beta1'])
	self.image_size = int(config['image_align_size'])
	self.width = int(config['width'])
	self.height = int(config['height'])
	self.decay_rate = float(config['decay_rate'])
	self.is_peason_div = int(config['is_peason'])
	self.loss_control = float(config['loss_control'])

	# Sub-networks.
	self.encoder = Encoder(int(config['nef']), z_dim, "Encoder")
	self.generator = Generator(int(config['ngf']), self.image_size, "Generator")
	self.discriminator_z = DiscriminatorZ(int(config['ndzf']), "DiscriminatorZ")
	self.discriminator = Discriminator(int(config['ndf']), "Discriminator")

	# Graph inputs, fed at session run time.
	image_shape = [None, self.width, self.height, 3]
	self.batch_size = tf.placeholder(tf.int32)
	self.img_batch = tf.placeholder(tf.float32, image_shape)
	self.age_batch = tf.placeholder(tf.float32, [None, int(config['age_segment'])])
	self.gender_batch = tf.placeholder(tf.float32, [None, int(config['gender_segment'])])
	self.prior = tf.placeholder(tf.float32, [None, z_dim])
	self.sample_batch = tf.placeholder(tf.float32, image_shape)
# Example #2
# 0
    def __init__(self, opt):
        """Build the bank GAN: shared discriminator, generator net, optimizers."""
        super(BankModel, self).__init__(opt)

        # The discriminator is shared with the generator bank network.
        self.main_disc = Discriminator()
        self.net = GeneratorBankNet(self.main_disc, opt)

        # All three optimizers use the same Adam betas.
        adam_betas = (0.5, 0.999)
        self.main_gen_optimizer = Adam(self.net.main.parameters(),
                                       lr=opt.gen_learning_rate_main,
                                       betas=adam_betas)
        self.bank_gen_optimizer = Adam(self.net.netGA.parameters(),
                                       lr=opt.gen_learning_rate_main,
                                       betas=adam_betas)
        self.main_disc_optimizer = Adam(self.main_disc.parameters(),
                                        lr=opt.disc_learning_rate_main,
                                        betas=adam_betas)

        # Move both networks onto the configured device.
        self.net.to(self.device)
        self.main_disc.to(self.device)
# Example #3
# 0
class BankModel(BaseModel):
    """GAN model pairing a generator 'bank' network with a shared discriminator.

    Networks and their optimizers are created in ``__init__``;
    ``load_pre_trained`` optionally restores weights from checkpoint paths
    given on ``opt``.
    """

    def __init__(self, opt):
        super(BankModel, self).__init__(opt)
        # The discriminator is shared with the generator bank network.
        self.main_disc = Discriminator()
        self.net = GeneratorBankNet(self.main_disc, opt)

        self.main_gen_optimizer = Adam(self.net.main.parameters(),
                                       lr=opt.gen_learning_rate_main,
                                       betas=(0.5, 0.999))
        self.bank_gen_optimizer = Adam(self.net.netGA.parameters(),
                                       lr=opt.gen_learning_rate_main,
                                       betas=(0.5, 0.999))
        self.main_disc_optimizer = Adam(self.main_disc.parameters(),
                                        lr=opt.disc_learning_rate_main,
                                        betas=(0.5, 0.999))

        self.net.to(self.device)
        self.main_disc.to(self.device)

    def load_pre_trained(self):
        """Restore generator (and optionally discriminator) weights.

        ``opt.pre_trained_model == 'none'`` means "no checkpoint": skip
        loading entirely.  The discriminator checkpoint is independent and
        only loaded when ``opt.pre_disc_trained_model`` is not ``'none'``.
        """
        if self.opt.pre_trained_model == 'none':
            print('No pre trained model')
            # BUG FIX: previously execution fell through and called
            # torch.load('none'), which raises FileNotFoundError.
            return
        self.net.main.load_state_dict(torch.load(self.opt.pre_trained_model))
        print('%s pre trained model loaded' % self.opt.pre_trained_model)
        self.net.to(self.device)
        if self.opt.pre_disc_trained_model != 'none':
            self.main_disc.load_state_dict(
                torch.load(self.opt.pre_disc_trained_model))
            print('%s pre trained disc model loaded' %
                  self.opt.pre_disc_trained_model)
        self.main_disc.to(self.device)
# Example #4
# 0
	def __init__(self, config):
		# initializing 2 generators and 2 discriminators
		# We don't use ImagePool. Perhaps later
		self.lr = float(config['lr'])
		self.generatorG = Generator(int(config['ngf']), "GeneratorG")
		self.generatorF = Generator(int(config['ngf']), "GeneratorF")
		self.discriminatorA = Discriminator(int(config['ndf']), "DiscriminatorA")
		self.discriminatorB = Discriminator(int(config['ndf']), "DiscriminatorB")
		self.is_peason_div = int(config['is_peason'])
		self.L1_lambda = float(config['L1_lambda'])
		self.width = int(config['width'])
		self.height = int(config['height'])
		self.img_A = tf.placeholder(tf.float32, [None, self.width, self.height, 3])
		self.img_B = tf.placeholder(tf.float32, [None, self.width, self.height, 3])
		self.meta_vector_A2B = tf.placeholder(tf.float32, [None, self.width, self.height, 1])
		self.meta_vector_B2A = tf.placeholder(tf.float32, [None, self.width, self.height, 1])
		self.sample_vector = tf.placeholder(tf.float32, [None, self.width, self.height, 3])
		self.sample_meta_data = tf.placeholder(tf.float32, [None, self.width, self.height, 1])
# Example #5
# 0
# One Adam optimizer (lr=2e-4, beta_1=0.5) per network.
generator_g_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)
generator_f_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)
discriminator_x_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)
discriminator_y_optimizer = tf.keras.optimizers.Adam(2e-4, beta_1=0.5)

# Each network needs its own freshly-initialized weights, so the wrapper
# object is rebuilt before extracting each Keras model.
generator = Generator(EMBED_SIZE, max_len_question)
generator_g = generator.get_model()
generator = Generator(EMBED_SIZE, max_len_question)
generator_f = generator.get_model()

discriminator = Discriminator(EMBED_SIZE, max_len_question)
discriminator_x = discriminator.get_model()
discriminator = Discriminator(EMBED_SIZE, max_len_question)
discriminator_y = discriminator.get_model()

# Held-out sentences used for evaluation.
test_encoded_samples = data.test_sentences
def train_step(real_x, real_y):
    # persistent is set to True because the tape is used more than
    # once to calculate the gradients.
    with tf.GradientTape(persistent=True) as tape:
        # Generator G translates X -> Y
# Example #6
# 0
print(f"Label distribution is: {label_dist}\n")

# Dump the test split to disk and report its label balance.
print(f"Saving test images to samples/test.")
label_dist = save_loader(testloader, 'test')
print(f"Label distribution is: {label_dist}\n")

# A low train--test FID confirms the dumped samples are consistent.
print(f"Evaluating train--test FID for sanity check...")
train_test_fid = evaluate_fid(paths=['samples/train', 'samples/test'])
print(f"Train--test FID is: {train_test_fid}")

#########################################
#### Build G and D ######################
#########################################

netG = Generator().to(device)
netD = Discriminator().to(device)

# Map the optim flag to (optimizer class, extra constructor kwargs).
_optim_table = {
    'sgd': (torch.optim.SGD, {}),
    'fromage': (Fromage, {}),
    'adam': (torch.optim.Adam, {'betas': (0.0, 0.999), 'eps': 1e-08}),
}
if args.optim not in _optim_table:
    raise Exception("Unsupported optim")
optimizer, kwargs = _optim_table[args.optim]

optG = optimizer(netG.parameters(), lr=args.lrG, **kwargs)
optD = optimizer(netD.parameters(), lr=args.lrD, **kwargs)
# Example #7
# 0
# Dump the held-out test images so FID can be computed against them.
print(f"Saving test images to samples/test.")
label_dist = save_loader(testloader, 'test')
print(f"Label distribution is: {label_dist}\n")

# Sanity check: FID between the dumped train and test samples should be
# low; a large value suggests the dump or the metric is broken.
print(f"Evaluating train--test FID for sanity check...")
train_test_fid = evaluate_fid(
    paths=[log_dir + 'samples/train', log_dir + 'samples/test'])
print(f"Train--test FID is: {train_test_fid}")

#########################################
#### Build G and D ######################
#########################################

netG = Generator().to(device)
netD = Discriminator().to(device)

# Report parameter counts for both networks.
print("Generator:")
print(f"{sum(p.numel() for p in netG.parameters())} parameters")
print(f"{len(list(netG.parameters()))} tensors")

print("\nDiscriminator:")
print(f"{sum(p.numel() for p in netD.parameters())} parameters")
print(f"{len(list(netD.parameters()))} tensors")

# Pick the optimizer pair from the command-line flag.
# NOTE(review): the elif chain appears truncated here; further branches
# (e.g. 'adam' / else) likely follow — confirm before editing.
if args.optim == 'sgd':
    optG = torch.optim.SGD(netG.parameters(), lr=args.initial_lr)
    optD = torch.optim.SGD(netD.parameters(), lr=args.initial_lr)
elif args.optim == 'fromage':
    optG = Fromage(netG.parameters(), lr=args.initial_lr)
    optD = Fromage(netD.parameters(), lr=args.initial_lr)