Example #1
import os

def generate(cnt):
    shape = (64, 64, 3)
    gen = build_gen(shape)
    # G is only used for inference here; the optimizer and loss are placeholders.
    gen.compile(optimizer='sgd', loss='mse')
    load_weights(gen, 'snapshots/9.gen.hdf5')

    # Write cnt grids of generated images.
    for i in range(1, cnt + 1):
        generated = gen.predict(binary_noise(64))

        # "КОРТИНОЧКА" is (misspelled) Russian for "little picture".
        animf = os.path.join('GENERATED', "КОРТИНОЧКА{}.png".format(i))
        dump_batch(generated, 8, animf)
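binary_noise is defined elsewhere in the repository and is not shown in any of these snippets. A minimal sketch of what such a helper might look like, assuming the latent shape is the (1, 1, 100) used in Example #4 and that "binary" means each component is quantized to -1 or +1 (both assumptions, not confirmed by the snippets):

import numpy as np

def binary_noise(cnt):
    # Hypothetical sketch: cnt latent vectors with each component forced to -1 or +1.
    noise = np.random.uniform(-1.0, 1.0, size=(cnt, 1, 1, 100))
    return np.where(noise >= 0, 1.0, -1.0).astype(np.float32)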
Example #2
import scipy.misc

def generate(genw, cnt):
    shape = (Args.sz, Args.sz, 3)
    gen = build_gen(shape)
    # G is only used for inference here; the optimizer and loss are placeholders.
    gen.compile(optimizer='sgd', loss='mse')
    load_weights(gen, genw)

    generated = gen.predict(binary_noise(Args.batch_sz))
    generated = denormalize4gan(generated)

    # cnt must not exceed Args.batch_sz, or generated[i] goes out of range.
    for i in range(cnt):
        ofname = "{:04d}.png".format(i)
        # scipy.misc.imsave was removed in SciPy 1.2; see the imageio note below.
        scipy.misc.imsave(ofname, generated[i])
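scipy.misc.imsave was deprecated in SciPy 1.0 and removed in 1.2, so this snippet no longer runs on a modern stack. imageio.imwrite is the usual drop-in replacement for the save loop:

import imageio

# Drop-in replacement for the loop above on SciPy >= 1.2.
for i in range(cnt):
    ofname = "{:04d}.png".format(i)
    imageio.imwrite(ofname, generated[i])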
Example #3
def generate(genw, cnt):
    shape = (Args.sz, Args.sz, 3)
    gen = build_gen(shape)
    gen.compile(optimizer='sgd', loss='mse')
    load_weights(gen, genw)

    generated = gen.predict(binary_noise(Args.batch_sz))
    # Undo the GAN normalization, for the whole batch.
    # Must convert back to uint8 to prevent color distortion.
    generated = denormalize4gan(generated)

    for i in range(cnt):
        ofname = "tmp/{:04d}.png".format(i)
        scipy.misc.imsave(ofname, generated[i])
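denormalize4gan is also repo-internal. Given the uint8 comment above, it presumably maps the generator's tanh output from [-1, 1] back to [0, 255] bytes. A minimal sketch under that assumption:

import numpy as np

def denormalize4gan(batch):
    # Hypothetical sketch: map [-1, 1] floats to [0, 255] and cast to uint8
    # so image writers don't distort the colors.
    out = (batch + 1.0) * 127.5
    return np.clip(out, 0, 255).astype(np.uint8)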
Example #4
from keras import models
from keras.layers import Input
from keras.optimizers import Adam

def build_networks():
    shape = (64, 64, 3)
    # D gets a slightly higher learning rate than G.
    dopt = Adam(lr=0.0002, beta_1=0.5)
    opt = Adam(lr=0.0001, beta_1=0.5)

    gen = build_gen(shape)
    gen.compile(optimizer=opt, loss='binary_crossentropy')
    gen.summary()

    disc = build_discriminator(shape)
    disc.compile(optimizer=dopt, loss='binary_crossentropy')
    disc.summary()

    # GAN stack: noise -> G -> D -> real/fake score.
    noise = Input(shape=(1, 1, 100))
    gened = gen(noise)
    result = disc(gened)
    gan = models.Model(inputs=noise, outputs=result)
    gan.compile(optimizer=opt, loss='binary_crossentropy')
    gan.summary()

    return gen, disc, gan
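build_networks only wires the models together; none of these snippets show the training step. Below is a sketch of one step of the standard DCGAN recipe, assuming D ends in a single sigmoid unit per image and reusing sample_faces and binary_noise from the other examples. Note that for gan.train_on_batch to update only G, disc.trainable = False must be set before gan.compile (Keras collects trainable weights at compile time), which the snippet above does not do:

import numpy as np

def train_step(gen, disc, gan, faces, batch_sz=64):
    # 1) Train D on a real batch (label 1) and a fake batch (label 0).
    reals = sample_faces(faces)
    fakes = gen.predict(binary_noise(batch_sz))
    d_loss = disc.train_on_batch(reals, np.ones((batch_sz, 1)))
    d_loss += disc.train_on_batch(fakes, np.zeros((batch_sz, 1)))

    # 2) Train G through the stacked model: ask the frozen D to call fakes real.
    g_loss = gan.train_on_batch(binary_noise(batch_sz), np.ones((batch_sz, 1)))
    return d_loss, g_loss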
Example #5
import h5py

def train_autoenc(dataf):
    '''
    Train an autoencoder first to see if your network is large enough.
    '''
    f = h5py.File(dataf, 'r')
    faces = f.get('faces')

    opt = Adam(lr=0.001)

    shape = (Args.sz, Args.sz, 3)
    enc = build_enc(shape)
    enc.compile(optimizer=opt, loss='mse')
    enc.summary()

    # generator part
    gen = build_gen(shape)
    # G is only trained through the autoencoder below,
    # so its own optimizer and loss don't matter much.
    gen.compile(optimizer=opt, loss='mse')
    gen.summary()

    # Autoencoder: face -> latent vector -> reconstructed face.
    face = Input(shape=shape)
    vector = enc(face)
    recons = gen(vector)
    autoenc = models.Model(inputs=face, outputs=recons)
    autoenc.compile(optimizer=opt, loss='mse')

    # Train until interrupted; Ctrl-C falls through to the weight saving below.
    batch = 0
    try:
        while True:
            for i in range(10):
                reals = sample_faces(faces)
                loss = autoenc.train_on_batch(reals, reals)
                batch += 1
                print(batch, loss)
            fakes = autoenc.predict(reals)
            dump_batch(fakes, 4, "tmp/fakes.png")
            dump_batch(reals, 4, "tmp/reals.png")
    except KeyboardInterrupt:
        pass
    gen.save_weights(Args.genw)
    enc.save_weights(Args.discw)
    print("Saved", Args.genw, Args.discw)
Example #6
def build_networks():
    shape = (Args.sz, Args.sz, 3)

    dopt = Adam(lr=0.0002, beta_1=Args.adam_beta)
    opt  = Adam(lr=0.0001, beta_1=Args.adam_beta)

    # generator part
    gen = build_gen(shape)
    gen.compile(optimizer=opt, loss='binary_crossentropy')
    gen.summary()

    # discriminator part
    disc = build_discriminator(shape)
    disc.compile(optimizer=dopt, loss='binary_crossentropy')
    disc.summary()

    noise = Input(shape=Args.noise_shape)
    gened = gen(noise)
    result = disc(gened)
    gan = models.Model(inputs=noise, outputs=result)
    gan.compile(optimizer=opt, loss='binary_crossentropy')
    gan.summary()

    return gen, disc, gan
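The Args object used throughout (Args.sz, Args.noise_shape, Args.adam_beta, Args.batch_sz, Args.genw, Args.discw) is a module-level configuration holder that these snippets don't show. A hypothetical minimal stand-in, with values guessed from the hard-coded numbers in Example #4:

class Args:
    # Hypothetical config holder; every value below is a guess for illustration,
    # not the repository's actual settings.
    sz = 64                    # image height/width
    noise_shape = (1, 1, 100)  # latent input shape fed to G
    adam_beta = 0.5            # beta_1 for Adam, as in the DCGAN paper
    batch_sz = 64              # minibatch size
    genw = "gen.hdf5"          # generator weight file
    discw = "disc.hdf5"        # discriminator (or encoder) weight file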
Example #7
def build_networks():
    shape = (Args.sz, Args.sz, 3)
    print("Args.sz:", Args.sz)
    # The learning rate is important.
    # Optimizers matter too; experiment with them yourself to fit your dataset.
    # I recommend reading the DCGAN paper.

    # Contrary to the gan hacks advice, SGD doesn't seem to work well here.
    # The DCGAN paper states that they used Adam for both G and D.
    # opt  = optimizers.SGD(lr=0.0001, decay=0.0, momentum=0.9, nesterov=True)
    # dopt = optimizers.SGD(lr=0.0001, decay=0.0, momentum=0.9, nesterov=True)

    # lr=0.010 looks good statistically (low D loss, higher G loss),
    # but it is too much for the G to create a face.
    # If you see only a single-color 'flood fill' for about 10 batches or so,
    # training is failing. If you see only a few colors (instead of colorful noise),
    # the lr is too high for the optimizer and G will not get a chance to form a face.
    # dopt = Adam(lr=0.010, beta_1=0.5)
    # opt  = Adam(lr=0.001, beta_1=0.5)

    # vague faces @ 500
    # Still can't get the higher-frequency components.
    # dopt = Adam(lr=0.0010, beta_1=0.5)
    # opt  = Adam(lr=0.0001, beta_1=0.5)

    # better faces @ 500,
    # but mode collapse after that, probably because the learning rate was too high.
    # opt.lr = dopt.lr / 10 works nicely; I found this by trial and error.
    # Now the lrs are nearly the same, as we use the history to train D multiple times.
    # I don't exactly understand how the decay parameter in Adam works; it is certainly
    # not exponential. It is actually faster than exponential, judging from the code
    # and from plotting it in Excel.
    dopt = Adam(lr=0.0002, beta_1=Args.adam_beta)
    opt = Adam(lr=0.0001, beta_1=Args.adam_beta)

    # too slow
    # Another thing about the LR:
    # if you make it small, optimization merely becomes slow.
    # The LR only has to be smaller than a certain data-dependent threshold
    # (related to the largest gradient that prevents optimization).
    # dopt = Adam(lr=0.000010, beta_1=0.5)
    # opt  = Adam(lr=0.000001, beta_1=0.5)

    # generator part
    gen = build_gen(shape)
    # The loss function doesn't seem to matter for this one, as G is not trained directly.
    gen.compile(optimizer=opt, loss='binary_crossentropy')
    gen.summary()

    # discriminator part
    disc = build_discriminator(shape)
    disc.compile(optimizer=dopt, loss='binary_crossentropy')
    disc.summary()

    # GAN stack
    # https://ctmakro.github.io/site/on_learning/fast_gan_in_keras.html describes a faster way.
    # Here, for simplicity, I use the slower way (slower due to duplicate computation).
    noise = Input(shape=Args.noise_shape)
    gened = gen(noise)
    result = disc(gened)
    gan = models.Model(inputs=noise, outputs=result)
    gan.compile(optimizer=opt, loss='binary_crossentropy')
    gan.summary()

    return gen, disc, gan
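On the Adam decay question in the comments above: if I read the Keras 2.x source correctly, the legacy optimizers apply inverse-time decay, lr_t = lr / (1 + decay * iterations). A few lines make the schedule easy to eyeball against an exponential one in a spreadsheet:

# Sketch of the (assumed) Keras 2.x legacy decay rule: lr_t = lr / (1 + decay * t).
# The decay value here is made up purely for illustration.
lr, decay = 0.0002, 1e-4
for t in (0, 100, 1000, 10000, 100000):
    print(t, lr / (1.0 + decay * t))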