import numpy as np


def generator(nb_fake, nb_real, z_dim):
    # Yield batches of real images rescaled to [-1, 1] (2*real - 1
    # assumes real_generator yields values in [0, 1]) together with
    # uniform noise for the generator input; real_generator and GAN
    # come from the surrounding project.
    for real in real_generator("/home/leon/data/tags_plain_t6.hdf5", nb_real):
        z = np.random.uniform(-1, 1, (nb_fake, z_dim)).astype(np.float32)
        yield {
            'real': 2 * real - 1,
            GAN.z_name: z
        }
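A quick sanity check of what this yields (a hedged sketch; the shapes assume z_dim=100 and 64x64 single-channel tag images):

batch = next(generator(nb_fake=96, nb_real=64, z_dim=100))
print(batch['real'].shape)      # e.g. (64, 1, 64, 64), values in [-1, 1]
print(batch[GAN.z_name].shape)  # (96, 100)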
Example #2
import json
import os

from keras.optimizers import Adam
# dcgan_generator, dcgan_discriminator, sequential_to_gan, SaveModels,
# VisualiseGAN, zip_real_z, real_generator, z_generator and train_dcgan
# are helpers from the surrounding project.


def train_g64_d64_fine_tune():
    nb_units = 64
    generator_input_dim = 100
    nb_real = 64
    nb_fake = 128 + nb_real
    lr = 0.00002
    beta_1 = 0.5
    nb_batches_per_epoch = 100
    nb_epoch = 60
    output_dir = "models/dcgan_g64_d64_fine_tune"
    hdf5_fname = "/home/leon/data/tags_plain_t6.hdf5"

    g = dcgan_generator(nb_units, generator_input_dim)
    d = dcgan_discriminator(nb_units)

    g.load_weights("models/dcgan_g64_d64/generator.hdf5")
    d.load_weights("models/dcgan_g64_d64/fix_discriminator.hdf5")

    gan = sequential_to_gan(g,
                            d,
                            nb_real,
                            nb_fake,
                            nb_fake_for_gen=128,
                            nb_fake_for_dis=nb_real)

    save = SaveModels(
        {
            "generator_{epoch:04d}.hdf5": g,
            "discriminator_{epoch:04d}.hdf5": d
        },
        every_epoch=20,
        output_dir=output_dir)
    visual = VisualiseGAN(nb_samples=13**2,
                          output_dir=output_dir,
                          preprocess=lambda x: np.clip(x, -1, 1))

    real_z_gen = zip_real_z(real_generator(hdf5_fname, nb_real, range=(-1, 1)),
                            z_generator((nb_fake, generator_input_dim)))

    history = train_dcgan(gan,
                          Adam(lr, beta_1),
                          Adam(lr, beta_1),
                          real_z_gen,
                          nb_batches_per_epoch=nb_batches_per_epoch,
                          nb_epoch=nb_epoch,
                          callbacks=[save, visual])

    with open(os.path.join(output_dir, "history.json"), 'w+') as f:
        json.dump(history.history, f)
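z_generator is not shown on this page; judging from the inline np.random.uniform calls in the other examples, it presumably yields an endless stream of uniform noise batches. A hedged sketch of that assumption:

def z_generator(shape):
    # Assumed behaviour, matching the inline noise sampling used in
    # the generator functions on this page.
    while True:
        yield np.random.uniform(-1, 1, shape).astype(np.float32)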
Example #3
def train_data_generator(real_hdf5_fname, batch_size, z_dim, translation=2):
    for data in real_generator(real_hdf5_fname, batch_size, range=(-1, 1)):
        # Three noise vectors per real sample, matching the
        # nb_fake = 3 * nb_real split used in the other examples.
        z = np.random.uniform(-1, 1, (3*batch_size, z_dim)).astype(np.float32)
        size = 64
        # Crop larger samples down to size x size with a random
        # +/-translation pixel jitter, a light translation augmentation.
        if data.shape[-2:] != (size, size):
            translated_data = []
            for sample in data:
                random_translation = np.random.choice(np.arange(-translation, translation+1))
                offset = (data.shape[-1] - size) // 2 + random_translation
                crop = slice(offset, size+offset)
                translated_data.append(sample[:, crop, crop])
            data = np.stack(translated_data)
        yield {
            'data': data,
            'z': z,
        }
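The crop loop above amounts to a jittered center crop. A standalone sketch of the same augmentation (hypothetical helper name) for a single channel-first sample:

def jittered_center_crop(sample, size=64, translation=2):
    # Center the size x size window, then shift it by a random offset
    # in [-translation, translation]; as in the loop above, the same
    # offset is applied to height and width.
    shift = np.random.choice(np.arange(-translation, translation + 1))
    offset = (sample.shape[-1] - size) // 2 + shift
    crop = slice(offset, offset + size)
    return sample[:, crop, crop]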
Example #4
def train_g64_d64_dct():
    nb_units = 64
    generator_input_dim = 25
    nb_real = 64
    nb_fake = 96
    lr = 0.0002
    beta_1 = 0.5
    nb_batches_per_epoch = 100
    nb_epoch = 1000
    output_dir = "models/dcgan_g64_d64_dct"
    hdf5_fname = "/home/leon/data/tags_plain_t6.hdf5"

    g = dcgan_generator(nb_units, generator_input_dim)
    d = dcgan_discriminator(nb_units)

    gan = sequential_to_gan(g, d, nb_real, nb_fake)

    save = SaveModels(
        {
            "generator.hdf5": g,
            "discriminator.hdf5": d
        },
        output_dir=output_dir)
    visual = VisualiseGAN(nb_samples=13**2,
                          output_dir=output_dir,
                          preprocess=lambda x: np.clip(x, -1, 1))

    real_z_gen = zip_real_z(real_generator(hdf5_fname, nb_real),
                            z_generator((nb_fake, generator_input_dim)))

    history = train_dcgan(gan,
                          Adam(lr, beta_1),
                          Adam(lr, beta_1),
                          real_z_gen,
                          nb_batches_per_epoch=nb_batches_per_epoch,
                          nb_epoch=nb_epoch,
                          callbacks=[save, visual])
    with open(os.path.join(output_dir, "history.json"), 'w+') as f:
        json.dump(history.history, f)
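The dumped history can be read back later for plotting; a minimal sketch, assuming history.history is the usual dict mapping metric names to per-epoch lists:

with open("models/dcgan_g64_d64_dct/history.json") as f:
    loaded = json.load(f)
for metric, values in loaded.items():
    print(metric, values[-1])  # final-epoch value of each metric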