# Example 1
        return (keras.losses.BinaryCrossentropy(from_logits=True)
                (tf.zeros_like(fake), real - tf.reduce_mean(fake)) +
                keras.losses.BinaryCrossentropy(from_logits=True)
                (tf.ones_like(real), fake - tf.reduce_mean(real))) / 2.0

    def _dis_loss(self, real, fake):
        """Relativistic average discriminator loss (RaGAN-style).

        Each side's logits are shifted by the mean logit of the opposite
        side before the usual real=1 / fake=0 binary cross-entropy; the
        two terms are averaged.

        Args:
            real: discriminator logits for real images.
            fake: discriminator logits for generated images.

        Returns:
            Scalar loss tensor.
        """
        bce = keras.losses.BinaryCrossentropy(from_logits=True)
        # Relativistic logits: judge each batch relative to the average
        # score of the opposite batch.
        rel_real = real - tf.reduce_mean(fake)
        rel_fake = fake - tf.reduce_mean(real)
        loss_real = bce(tf.ones_like(real), rel_real)
        loss_fake = bce(tf.zeros_like(fake), rel_fake)
        return (loss_real + loss_fake) / 2.0


if __name__ == "__main__":
    # Train a RaGAN on k x k cat images stored as one numpy array,
    # then render the per-epoch samples into an animated gif.
    k = 64
    images = np.load(f"./cat{k}.npy")
    dataset = process_numpy(images, batch_size=32)

    ema_cb = EMACallback()   # exponential moving average of weights
    show_cb = ShowCallback()  # periodic sample grids

    gan = RaGAN()
    gan.build(images.shape[1:])
    gan.compile(
        d_optimizer=tfa.optimizers.AdamW(learning_rate=1e-4,
                                         weight_decay=5e-5),
        g_optimizer=tfa.optimizers.AdamW(learning_rate=1e-4,
                                         weight_decay=5e-5),
        loss=keras.losses.BinaryCrossentropy(from_logits=True),
    )

    gan.fit(dataset, epochs=10, callbacks=[ema_cb, show_cb])
    show_cb.make_gif("ragan.gif")
# Example 2
    o = keras.layers.LeakyReLU(0.2)(o)

    for l, s in layer_dict.items():
        if l == img_shape[0] // 2:
            break
        o = keras.layers.Conv2DTranspose(filters=filter_num *
                                         layer_dict[o.shape[1]],
                                         kernel_size=1,
                                         strides=2,
                                         padding="SAME")(o)
        o = keras.layers.BatchNormalization()(o)
        o = keras.layers.LeakyReLU(0.2)(o)

    o = keras.layers.Conv2DTranspose(filters=256,
                                     kernel_size=1,
                                     strides=2,
                                     padding="SAME")(o)
    return keras.Model(inputs=inputs, outputs=o)


# Build, inspect, and train the segmentation model, then visualise the
# predictions against the ground-truth masks.
dataset = process_numpy(img_data, seg_data, batch_size=8)

model = get_model()
model.summary()
keras.utils.plot_model(model, show_shapes=True)

model.compile(
    optimizer=keras.optimizers.Adam(learning_rate=1e-4),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    metrics=CustomIoU(num_classes=256),
)
model.fit(dataset, epochs=20)

# Show the first 16 ground-truth masks next to the model's predictions.
show(seg_data[:16], model(img_data[:16]))
# Example 3
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
from tensorflow import keras

from dcgan import DCGAN
from hrgan import HrGAN
from pergan import PerGAN
from stylegan import StyleGAN
from stylegan2 import StyleGAN2
from utils import EMACallback, process_directory, ShowCallback, process_numpy, StyleShowCallback, PathCallback

# Alternative source: raw image directory instead of the packed array.
# anime_path = "E:/project/data/process/picture/seeprettyface_anime_face/anime_face"
# anime_dataset = process_directory(anime_path, batch_size=16)

# Preprocessed 64x64 anime faces packed into a single numpy file.
anime_path = "E:/project/data/process/picture/anime64.npy"
anime_dataset = process_numpy(np.load(anime_path), batch_size=8)

# EMA weight averaging, style-sample grids, and path visualisation.
ema = EMACallback()
show = StyleShowCallback()
path = PathCallback()

model = HrGAN(latent_dim=128, filter_num=8)
model.build((64, 64, 3))

# Both optimizers share the same AdamW hyper-parameters.
adamw_kwargs = dict(learning_rate=1e-4, weight_decay=5e-5,
                    beta_1=0., beta_2=0.99)
model.compile(d_optimizer=tfa.optimizers.AdamW(**adamw_kwargs),
              g_optimizer=tfa.optimizers.AdamW(**adamw_kwargs),
              loss=keras.losses.MeanSquaredError())

model.summary()
keras.utils.plot_model(model.gen, show_shapes=True, to_file="gen.png", expand_nested=True)
keras.utils.plot_model(model.dis, show_shapes=True, to_file="dis.png", expand_nested=True)

model.fit(anime_dataset, epochs=50, callbacks=[ema, show, path])