Exemplo n.º 1
0
def interpolate(model, input1, input2):
    """Interpolate between two batches of inputs in the model's latent space.

    Encodes both batches, spherically interpolates (slerp) between the
    paired latent codes at 10 evenly spaced ratios in [0, 1], and decodes
    each interpolated latent back to image space.

    Args:
        model: object exposing ``encode``/``decode`` whose results have a
            ``.numpy()`` method (e.g. a TF/Keras autoencoder).
        input1: first batch; only the leading overlap with ``input2`` is used.
        input2: second batch.

    Returns:
        A list with one entry per sample:
        ``[original_1, decode_at_t=0, ..., decode_at_t=1, original_2]``.
    """
    # Truncate both batches to a common length so latents pair up 1:1.
    n = min(input1.shape[0], input2.shape[0])
    input1 = input1[:n]
    input2 = input2[:n]

    z1 = model.encode(input1).numpy()
    z2 = model.encode(input2).numpy()

    decodes = defaultdict(list)
    for ratio in tqdm(np.linspace(0, 1, 10), position=0):
        # Slerp each latent pair at this ratio, then decode the whole batch.
        z = np.stack([slerp(ratio, r1, r2) for r1, r2 in zip(z1, z2)])
        if len(z.shape) > 2:
            # Collapse trailing singleton dims so decode() sees (batch, dim).
            z = np.reshape(z, z.shape[:2])
        z_decode = model.decode(z).numpy()

        # Accumulate per-sample decodes directly; the original's single-use
        # intermediate `decode` dict was redundant.
        for i in range(z_decode.shape[0]):
            decodes[i].append(z_decode[i])

    imgs = []
    for idx in decodes:
        # BUG FIX: the originals were appended as whole batches
        # (`l += [input1]`); append only the sample matching this row,
        # as the previously commented-out `input1[idx:idx + 1][0]` intended.
        row = [input1[idx]]
        row += decodes[idx]
        row.append(input2[idx])
        imgs.append(row)

    return imgs
Exemplo n.º 2
0
def generate_images_like_a_batch(model, data_generator, save_dir):
    """Save synthetic/original image pairs for one batch of real data.

    Draws a batch from ``data_generator``, encodes it, slerps every real
    latent towards a single shared random latent by a single random amount,
    decodes the result, and writes each synthetic image and its original
    counterpart to ``save_dir`` as PNGs.

    Args:
        model: object exposing ``encode``/``decode``; ``decode`` must
            return a tensor with a ``.numpy()`` method.
        data_generator: iterator yielding dicts with an ``'images'`` key.
        save_dir: existing directory the PNG files are written into.
    """
    # NOTE(review): the original declared an unused ``epsilon = 1e-3``
    # local here; removed as dead code.

    # Fetch one batch of real images and encode it.
    original_data = next(data_generator)['images']
    if isinstance(original_data, tuple):
        original_data = original_data[0]
    latents_real = model.encode(original_data)

    # One random latent tiled across the batch, plus a single random
    # interpolation weight shared by every sample.
    ln = np.random.normal(size=[latents_real.shape[1]])
    latents_t = np.array([ln for _ in range(latents_real.shape[0])])
    lerp_t = np.random.uniform(size=1)[0]
    latents_e = slerp(lerp_t, latents_real, latents_t)
    images = model.decode(latents_e).numpy()

    for i, (original_image,
            synthetic_image) in tqdm(enumerate(zip(original_data, images)),
                                     position=0):
        # Pixel values are assumed to be floats in [0, 1] — TODO confirm.
        fig_name = os.path.join(save_dir,
                                'synthetic_image_{:06d}.png'.format(i))
        image = Image.fromarray((synthetic_image * 255).astype(np.uint8),
                                mode='RGB')
        image.save(fig_name)

        fig_name = os.path.join(save_dir,
                                'original_image_{:06d}.png'.format(i))
        image = Image.fromarray((original_image * 255).astype(np.uint8),
                                mode='RGB')
        image.save(fig_name)
Exemplo n.º 3
0
    def model_random_images_generator():
        """Yield kNN recall of generated samples vs. real data, forever.

        Each iteration pulls a real batch, encodes it, slerps the real
        latents towards random normal latents by a random weight (offset
        by ``epsilon``), decodes, and compares VGG16 features of the
        generated images against those of the real batch via
        ``knn_precision_recall_features``. Closes over ``model``,
        ``data_generator``, ``epsilon``, ``VGG16_featues_fn``,
        ``VGG16_model``, ``nhood_size`` and the batch-size parameters of
        the enclosing scope.
        """
        while True:
            data = next(data_generator)['images']
            ref_features = VGG16_featues_fn(data)

            # Generate latents from the data.
            latents_real = model.encode(data)

            # Random target latents and a single shared interpolation weight.
            latents_t = np.random.normal(size=latents_real.shape)
            lerp_t = np.random.uniform(size=1)[0]

            latents_e = slerp(lerp_t + epsilon, latents_real, latents_t)
            images = model.decode(latents_e).numpy()
            # NOTE(review): cast is float32 here, while the sibling
            # generator uses uint8 — confirm VGG16_featues_fn expects floats.
            images = (images * 255).astype(np.float32)

            eval_features = VGG16_featues_fn(images)

            # Calculate precision and recall.
            state = knn_precision_recall_features(
                ref_features=ref_features,
                eval_features=eval_features,
                feature_net=VGG16_model,
                nhood_sizes=[nhood_size],
                row_batch_size=row_batch_size,
                col_batch_size=col_batch_size)

            # Only recall is consumed; the original's unused
            # ``knn_precision`` local was dropped as dead code.
            yield state['knn_recall'][0]
Exemplo n.º 4
0
    def model_random_images_generator():
        """Endlessly yield image batches decoded from latents that lie
        between real encodings and random noise."""
        while True:
            # Encode the next real batch into latent space.
            real_z = model.encode(next(data_generator)['images'])

            # One random latent per sample plus a single shared slerp weight.
            noise_z = np.random.normal(size=real_z.shape)
            t = np.random.uniform(size=1)[0]

            mixed_z = slerp(t, real_z, noise_z)
            decoded = model.decode(mixed_z)  # kept as a tensor (no .numpy())
            yield decoded[:batch_size]
    def generated_predictions_generator():
        """Endlessly yield Inception predictions for generated samples."""
        while True:
            batch = next(data_generator)['images']

            # Upscale small inputs to 75x75 — presumably the minimum
            # size the Inception network accepts.
            if batch.shape[-2] < 75:
                batch = tf.map_fn(
                    lambda image: tf.image.resize(image, [75, 75]), batch)

            # The network expects 3-channel inputs.
            if batch.shape[-1] == 1:
                batch = tf.image.grayscale_to_rgb(batch)

            real_z = model.encode(batch)

            # Slerp the real latents towards random noise by a random amount.
            noise_z = np.random.normal(size=real_z.shape)
            t = np.random.uniform(size=1)[0]

            samples = model.decode(slerp(t, real_z, noise_z)).numpy()
            samples = (samples * 255).astype(np.uint8)
            yield inception_predictions(samples)[:batch_size]