import numpy as np

def get_inception_score(test_data):
    # Relies on module-level sess, x, y, is_training (a TensorFlow classifier graph)
    # plus the softmax and scale_value helpers.
    # Flatten images and rescale them to the input range expected by the classifier.
    test_data = np.reshape(test_data, (-1, 28 * 28))
    test_data = scale_value(test_data, [-0.5, 0.5])

    batch_size = 100
    test_size = test_data.shape[0]
    total_batch = int(test_size / batch_size)

    # Collect class probabilities p(y|x) for every batch.
    preds = []
    for i in range(total_batch):
        offset = (i * batch_size) % test_size
        batch_xs = test_data[offset:(offset + batch_size), :]
        y_final = sess.run(y, feed_dict={x: batch_xs, is_training: False})
        pred_softmax = softmax(y_final)
        preds.append(pred_softmax)
    preds = np.concatenate(preds, 0)

    # Inception Score: exp(E_x[KL(p(y|x) || p(y))]), averaged over `splits` subsets.
    scores = []
    splits = 10
    for i in range(splits):
        part = preds[(i * preds.shape[0] // splits):((i + 1) * preds.shape[0] // splits), :]
        kl = part * (np.log(part) - np.log(np.expand_dims(np.mean(part, 0), 0)))
        kl = np.mean(np.sum(kl, 1))
        scores.append(np.exp(kl))

    icp = (np.mean(scores), np.std(scores))
    return icp
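# Neither scale_value nor softmax is defined in these snippets; the sketches
# below are assumptions matching how they are called: scale_value(x, [lo, hi])
# linearly rescales an array into [lo, hi], and softmax turns a batch of
# classifier logits into per-class probabilities. The real helpers may differ.
def scale_value(x, out_range):
    lo, hi = out_range
    x_min, x_max = x.min(), x.max()
    return (x - x_min) / (x_max - x_min) * (hi - lo) + lo

def softmax(logits):
    # Subtract the row-wise max for numerical stability before exponentiating.
    z = logits - np.max(logits, axis=1, keepdims=True)
    e = np.exp(z)
    return e / np.sum(e, axis=1, keepdims=True)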
def compute_inception_score(n):
    # Draw n samples from the generator in batches of 100 (samples_100 is a
    # graph op that yields 100 generated images per session.run call).
    all_samples = []
    for i in range(int(n / 100)):
        all_samples.append(session.run(samples_100))
    all_samples = np.concatenate(all_samples, axis=0)

    # Reshape to NCHW CIFAR-10 images and rescale to [-1, 1] before scoring.
    all_samples = all_samples.reshape((-1, 3, 32, 32))
    all_samples = scale_value(all_samples, [-1.0, 1.0])
    print(all_samples.shape)
    return get_inception_score(all_samples)
def compute_metric(generator_model):
    global best_icp
    # Generate 20,000 images from random noise and score them.
    sample_size = 20000
    noise = np.random.normal(size=(sample_size, 100))
    art_images = generator_model.predict(noise)

    # Rescale to [-1, 1] and convert NHWC -> NCHW for the scoring network.
    art_images = scale_value(art_images, [-1.0, 1.0])
    art_images = np.transpose(art_images, (0, 3, 1, 2))

    (icp_mean, icp_std) = get_inception_score(art_images)
    if icp_mean > best_icp:
        best_icp = icp_mean
    print('Inception score: ', icp_mean)
def generate_images(generator, image_path, epoch, cmap='gray'):
    xsamples = generator()
    xsamples = scale_value(xsamples, [0, 1])  # convert tanh output to [0, 1] for display
    image_path = image_path.format(epoch)
    write_image_grid(image_path, xsamples, cmap=cmap)
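# write_image_grid is not defined in these snippets. A minimal sketch, assuming
# it tiles a (rows, cols, H, W) or (rows, cols, H, W, C) array of samples into
# a single figure and saves it to disk; the actual helper may differ.
import matplotlib.pyplot as plt

def write_image_grid(filepath, imgs, cmap='gray'):
    rows, cols = imgs.shape[0], imgs.shape[1]
    fig, axes = plt.subplots(rows, cols, figsize=(cols, rows))
    for r in range(rows):
        for c in range(cols):
            axes[r, c].imshow(np.squeeze(imgs[r, c]), cmap=cmap)
            axes[r, c].axis('off')
    fig.savefig(filepath)
    plt.close(fig)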
def cifar10_process(x):
    x = x.astype(np.float32)
    x = scale_value(x, [-1, 1])  # rescale to [-1, 1] to match the generator's tanh output range
    return x
def mnist_process(x):
    x = np.reshape(x, newshape=(-1, 28, 28, 1))
    x = x.astype(np.float32)
    x = scale_value(x, [-1.0, 1.0])  # rescale to [-1, 1] for compatibility with the tanh output
    return x
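# Hypothetical usage of mnist_process, assuming Keras is available and the
# scale_value helper sketched above: load the MNIST images and map them into
# the tanh range expected by the GAN.
from keras.datasets import mnist

(x_train, _), (x_test, _) = mnist.load_data()
x_train = mnist_process(x_train)  # shape (60000, 28, 28, 1), values in [-1, 1]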
def generator_sampler():
    images = dim_ordering_unfix(generator.predict(zsamples)).transpose((0, 2, 3, 1))
    images = scale_value(images, [0.0, 1.0])  # rescale tanh output to [0, 1] for display
    return images.reshape((10, 10, 28, 28))
def generator_sampler():
    xpred = dim_ordering_unfix(generator.predict(zsamples)).transpose((0, 2, 3, 1))
    xpred = scale_value(xpred, [0.0, 1.0])  # rescale tanh output to [0, 1] for display
    return xpred.reshape((10, 10) + xpred.shape[1:])