Example #1
def test_gan_custom_layer_graph():
    z_shape = (1, 8, 8)
    z = Input(shape=z_shape, name='z')
    gen_cond = Input(shape=(1, 8, 8), name='gen_cond')

    inputs = [z, gen_cond]
    gen_input = merge(inputs, mode='concat', concat_axis=1)
    gen_output = Convolution2D(1, 2, 2, activation='relu',
                               name='g1',
                               border_mode='same')(gen_input)
    generator = Container(inputs, gen_output)

    f, r = Input(z_shape, name='fake'), Input(z_shape, name='real')
    inputs = [f, r]
    dis_input = merge(inputs, mode='concat', concat_axis=0)  # fake and real stacked along the batch axis
    dis_conv = Convolution2D(5, 2, 2, name='d1', activation='relu')(dis_input)
    dis_flatten = Flatten()(dis_conv)
    dis = Dense(1, activation='sigmoid')(dis_flatten)
    discriminator = Container(inputs, gan_outputs(dis))

    gan = GAN(generator, discriminator, z_shape=z_shape, real_shape=z_shape)
    gan.build('adam', 'adam', gan_binary_crossentropy)
    fn = gan.compile_custom_layers(['g1', 'd1'])
    z = np.random.uniform(-1, 1, (64,) + z_shape)
    real = np.random.uniform(-1, 1, (64,) + z_shape)
    cond = np.random.uniform(-1, 1, (64,) + z_shape)
    print(z.shape)
    print(real.shape)
    print(cond.shape)
    fn({'z': z, 'gen_cond': cond, 'real': real})
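
The examples on this page are shown without their imports. For Example #1 a plausible preamble is sketched below: the layer and Container imports are the standard Keras 1.x API, while GAN, gan_outputs and gan_binary_crossentropy are assumed to come from the GAN library these tests exercise (its module path is not visible in the snippet).

import numpy as np
from keras.layers import Input, Dense, Flatten, Convolution2D, merge
from keras.engine.topology import Container
# GAN, gan_outputs and gan_binary_crossentropy are provided by the GAN helper
# library under test; import them from wherever that package exposes them.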
Example #2
def test_gan_utility_funcs(simple_gan: GAN):
    simple_gan.build('adam', 'adam', gan_binary_crossentropy)
    simple_gan.compile()
    xy_shp = simple_gan_z_shape[1:]  # shape constant defined alongside the simple_gan fixture in the test module
    x = np.zeros(xy_shp, dtype=np.float32)
    y = np.zeros(xy_shp, dtype=np.float32)
    simple_gan.interpolate(x, y)

    z_point = simple_gan.random_z_point()
    neighbors = simple_gan.neighborhood(z_point, std=0.05)

    # samples drawn around the same z point with std=0.05 should stay close together
    diff = np.stack([neighbors[0]]*len(neighbors)) - neighbors
    assert np.abs(diff).mean() < 0.1
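
The 0.1 bound in the final assert is generous for a neighborhood drawn with std=0.05: the difference of two independent Gaussian samples with std 0.05 has std 0.05*sqrt(2) (about 0.07), and its mean absolute value is roughly 0.8 of that. A standalone numpy check of the same bound, independent of the GAN API:

import numpy as np

rng = np.random.RandomState(0)
z_point = rng.uniform(-1, 1, size=(25,))
# mimic a neighborhood: samples scattered around z_point with std 0.05
neighbors = z_point + rng.normal(0, 0.05, size=(10,) + z_point.shape)
diff = np.stack([neighbors[0]] * len(neighbors)) - neighbors
print(np.abs(diff).mean())  # about 0.05, comfortably below the 0.1 tolerance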
Example #3
def mogan(self, gan: GAN, loss_fn, d_optimizer, name="mogan",
          gan_objective=binary_crossentropy, gan_regulizer=None,
          cond_true_ndim=4):
    assert len(gan.conditionals) >= 1

    # the generator gets a throwaway optimizer: its updates are dropped below and
    # rebuilt by MultipleObjectives from the combined g_loss + cond_loss
    g_dummy_opt = SGD()
    v = gan.build(g_dummy_opt, d_optimizer, gan_objective)
    del v['g_updates']

    cond_true = K.placeholder(ndim=cond_true_ndim)
    inputs = copy(gan.graph.inputs)
    inputs['cond_true'] = cond_true

    cond_loss = loss_fn(cond_true, v.g_outmap)

    metrics = {
        "cond_loss": cond_loss.mean(),
        "d_loss": v.d_loss,
        "g_loss": v.g_loss,
    }

    params = flatten([n.trainable_weights
                      for n in gan.get_generator_nodes().values()])

    return MultipleObjectives(
        name, inputs, metrics=metrics, params=params,
        objectives={'g_loss': v['g_loss'], 'cond_loss': cond_loss},
        additional_updates=v['d_updates'] + gan.updates)
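
MultipleObjectives and flatten belong to the library under test, so their exact behavior is not visible in this snippet. As a library-agnostic toy sketch of the idea the code expresses (one shared parameter set updated against the combined gradients of g_loss and cond_loss), consider this hypothetical illustration:

# Toy illustration only, not the library's API: descend the summed gradients
# of two objectives over one shared parameter.
w = 5.0
for _ in range(200):
    grad_g_loss = 2 * w           # d/dw of w**2
    grad_cond_loss = 2 * (w - 1)  # d/dw of (w - 1)**2
    w -= 0.01 * (grad_g_loss + grad_cond_loss)
print(w)  # approaches 0.5, the minimizer of the summed objective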
Example #4
def test_gan_graph():
    z_shape = (1, 8, 8)
    z = Input(shape=z_shape, name='z')
    gen_cond = Input(shape=(1, 8, 8), name='gen_cond')

    inputs = [z, gen_cond]
    gen_input = merge(inputs, mode='concat', concat_axis=1)
    gen_output = Convolution2D(10, 2, 2, activation='relu',
                               border_mode='same')(gen_input)
    generator = Container(inputs, gen_output)

    f, r = Input(z_shape, name='f'), Input(z_shape, name='r')
    inputs = [f, r]
    dis_input = merge(inputs, mode='concat', concat_axis=1)  # fake and real stacked along the channel axis
    dis_conv = Convolution2D(5, 2, 2, activation='relu')(dis_input)
    dis_flatten = Flatten()(dis_conv)
    dis = Dense(1, activation='sigmoid')(dis_flatten)
    discriminator = Container(inputs, gan_outputs(dis))

    gan = GAN(generator, discriminator, z_shape=z_shape, real_shape=z_shape)
    gan.build('adam', 'adam', gan_binary_crossentropy)
    gan.compile()
    gan.generate({'gen_cond': np.zeros((64,) + z_shape)}, nb_samples=64)
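
This test, like the others, uses Keras 1.x names (Convolution2D, merge, Container, border_mode). For readers on current Keras the generator graph translates roughly as below; this is an illustrative sketch only, since the GAN class in these examples targets the Keras 1 API.

from keras.layers import Input, Concatenate, Conv2D
from keras.models import Model

z = Input(shape=(1, 8, 8), name='z')
gen_cond = Input(shape=(1, 8, 8), name='gen_cond')
gen_input = Concatenate(axis=1)([z, gen_cond])      # merge(..., mode='concat', concat_axis=1)
gen_output = Conv2D(10, (2, 2), activation='relu',  # Convolution2D(10, 2, 2, ...)
                    padding='same',                  # border_mode='same'
                    data_format='channels_first')(gen_input)
generator = Model([z, gen_cond], gen_output)         # Container -> Model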
Example #5
def mogan(self,
          gan: GAN,
          loss_fn,
          d_optimizer,
          name="mogan",
          gan_objective=binary_crossentropy,
          gan_regulizer=None,
          cond_true_ndim=4):
    assert len(gan.conditionals) >= 1

    g_dummy_opt = SGD()
    v = gan.build(g_dummy_opt, d_optimizer, gan_objective)
    del v['g_updates']

    cond_true = K.placeholder(ndim=cond_true_ndim)
    inputs = copy(gan.graph.inputs)
    inputs['cond_true'] = cond_true

    cond_loss = loss_fn(cond_true, v.g_outmap)

    metrics = {
        "cond_loss": cond_loss.mean(),
        "d_loss": v.d_loss,
        "g_loss": v.g_loss,
    }

    params = flatten(
        [n.trainable_weights for n in gan.get_generator_nodes().values()])

    return MultipleObjectives(name,
                              inputs,
                              metrics=metrics,
                              params=params,
                              objectives={
                                  'g_loss': v['g_loss'],
                                  'cond_loss': cond_loss
                              },
                              additional_updates=v['d_updates'] + gan.updates)
Example #6
def test_mask_blending_generator():
    nb_driver = 20

    def driver(z):
        return Dense(nb_driver)(z)

    def mask_generator(x):
        return sequential([
            Dense(16),
            Reshape((1, 4, 4)),
            UpSampling2D((16, 16))
        ])(x)

    def merge_mask(subsample):
        def call(x):
            if subsample:
                x = MaxPooling2D(subsample)(x)
            return Convolution2D(1, 3, 3, border_mode='same')(x)
        return call

    def light_generator(ins):
        seq = sequential([
            Convolution2D(1, 3, 3, border_mode='same')
        ])(concat(ins))
        return UpSampling2D((4, 4))(seq), UpSampling2D((4, 4))(seq)

    def offset_front(x):
        return sequential([
            Dense(16),
            Reshape((1, 4, 4)),
            UpSampling2D((4, 4))
        ])(concat(x))

    def offset_middle(x):
        return UpSampling2D()(concat(x))

    def offset_back(x):
        feature_map = sequential([
            UpSampling2D(),
        ])(concat(x))
        return feature_map, Convolution2D(1, 3, 3,
                                          border_mode='same')(feature_map)

    def mask_post(x):
        return sequential([
            Convolution2D(1, 3, 3, border_mode='same')
        ])(concat(x))

    def mask_weight_blending(x):
        return sequential([
            Flatten(),
            Dense(1),
        ])(x)

    def discriminator(x):
        return gan_outputs(sequential([
            Flatten(),
            Dense(1),
        ])(concat(x)), fake_for_gen=(0, 10), fake_for_dis=(0, 10),
                           real=(10, 20))

    gen = mask_blending_generator(
        mask_driver=driver,
        mask_generator=mask_generator,
        light_merge_mask16=merge_mask(None),
        offset_merge_light16=merge_mask((4, 4)),
        offset_merge_mask16=merge_mask(None),
        offset_merge_mask32=merge_mask(None),
        lighting_generator=light_generator,
        offset_front=offset_front,
        offset_middle=offset_middle,
        offset_back=offset_back,
        mask_weight_blending32=mask_weight_blending,
        mask_weight_blending64=mask_weight_blending,
        mask_postprocess=mask_post,
        z_for_driver=(0, 10),
        z_for_offset=(10, 20),
        z_for_bits=(20, 32),
    )
    z_shape = (32, )
    real_shape = (1, 64, 64)
    gan = GAN(gen, discriminator, z_shape, real_shape)
    gan.build(Adam(), Adam(), gan_binary_crossentropy)
    for l in gan.gen_layers:
        print("{}: {}, {}".format(
            l.name, l.output_shape, getattr(l, 'regularizers', [])))
    bs = 10
    z_in = np.random.sample((bs,) + z_shape)
    gan.compile_generate()
    gan.generate({'z': z_in})
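
The z_for_driver, z_for_offset and z_for_bits arguments look like (start, end) index ranges that split the 32-dimensional z vector between the sub-networks. Assuming that reading, the partition can be checked with plain numpy:

import numpy as np

z = np.random.sample((10, 32))
z_driver, z_offset, z_bits = z[:, 0:10], z[:, 10:20], z[:, 20:32]
print(z_driver.shape, z_offset.shape, z_bits.shape)  # (10, 10) (10, 10) (10, 12)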
Example #7
def test_mask_blending_generator():
    nb_driver = 20

    def driver(z):
        return Dense(nb_driver)(z)

    def mask_generator(x):
        return sequential(
            [Dense(16), Reshape((1, 4, 4)),
             UpSampling2D((16, 16))])(x)

    def merge_mask(subsample):
        def call(x):
            if subsample:
                x = MaxPooling2D(subsample)(x)
            return Convolution2D(1, 3, 3, border_mode='same')(x)

        return call

    def light_generator(ins):
        seq = sequential([Convolution2D(1, 3, 3,
                                        border_mode='same')])(concat(ins))
        return UpSampling2D((4, 4))(seq), UpSampling2D((4, 4))(seq)

    def offset_front(x):
        return sequential(
            [Dense(16), Reshape((1, 4, 4)),
             UpSampling2D((4, 4))])(concat(x))

    def offset_middle(x):
        return UpSampling2D()(concat(x))

    def offset_back(x):
        feature_map = sequential([
            UpSampling2D(),
        ])(concat(x))
        return feature_map, Convolution2D(1, 3, 3,
                                          border_mode='same')(feature_map)

    def mask_post(x):
        return sequential([Convolution2D(1, 3, 3,
                                         border_mode='same')])(concat(x))

    def mask_weight_blending(x):
        return sequential([
            Flatten(),
            Dense(1),
        ])(x)

    def discriminator(x):
        return gan_outputs(sequential([
            Flatten(),
            Dense(1),
        ])(concat(x)),
                           fake_for_gen=(0, 10),
                           fake_for_dis=(0, 10),
                           real=(10, 20))

    gen = mask_blending_generator(
        mask_driver=driver,
        mask_generator=mask_generator,
        light_merge_mask16=merge_mask(None),
        offset_merge_light16=merge_mask((4, 4)),
        offset_merge_mask16=merge_mask(None),
        offset_merge_mask32=merge_mask(None),
        lighting_generator=light_generator,
        offset_front=offset_front,
        offset_middle=offset_middle,
        offset_back=offset_back,
        mask_weight_blending32=mask_weight_blending,
        mask_weight_blending64=mask_weight_blending,
        mask_postprocess=mask_post,
        z_for_driver=(0, 10),
        z_for_offset=(10, 20),
        z_for_bits=(20, 32),
    )
    z_shape = (32, )
    real_shape = (1, 64, 64)
    gan = GAN(gen, discriminator, z_shape, real_shape)
    gan.build(Adam(), Adam(), gan_binary_crossentropy)
    for l in gan.gen_layers:
        print("{}: {}, {}".format(l.name, l.output_shape,
                                  getattr(l, 'regularizers', [])))
    bs = 10
    z_in = np.random.sample((bs, ) + z_shape)
    gan.compile_generate()
    gan.generate({'z': z_in})