# Imports assumed by the snippets below (a Progressive Growing of GANs-style
# codebase; config, dataset, and misc are project modules from that repo).
import glob
import os
import random

import cv2
import numpy as np
import pandas
import tensorflow as tf
from PIL import Image

import config
import dataset
import misc


def generate_fake_images(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                         image_shrink=1, png_prefix=None, random_seed=1000,
                         minibatch_size=8):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)  # was missing; random_latents() below needs it

    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)

    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)
    for png_idx in range(num_pngs):
        print('Generating png %d / %d...' % (png_idx, num_pngs))
        latents = misc.random_latents(np.prod(grid_size), Gs, random_state=random_state)
        labels = np.zeros([latents.shape[0], 0], np.float32)  # unconditional: zero-width labels
        images = Gs.run(latents, labels, minibatch_size=minibatch_size,
                        num_gpus=config.num_gpus, out_mul=127.5, out_add=127.5,
                        out_shrink=image_shrink, out_dtype=np.uint8)
        misc.save_image_grid(images,
                             os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx)),
                             [0, 255], grid_size)
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()
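
# A minimal usage sketch (the run id and sizes below are hypothetical;
# config.result_dir and config.desc must be configured as usual for this codebase):
#
#   generate_fake_images(42, grid_size=[2, 2], num_pngs=10)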

def generate_fake_images_cond2(run_id, x, y, snapshot=None, grid_size=[1, 1],
                               num_pngs=1, image_shrink=1, png_prefix=None,
                               random_seed=1000, minibatch_size=1):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    training_set = dataset.load_dataset(data_dir=config.data_dir, shuffle_mb=0,
                                        verbose=True, **config.dataset)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    real, label = training_set.get_minibatch_np(num_pngs)
    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)

    size = 128
    grid = np.zeros((3, x * 256, y * 256))

    # Load up to 17 seed tiles from cond4/<i>.png as CHW float32 arrays.
    image = []
    for i in range(1, 18):
        filename = 'cond4/' + str(i) + '.png'
        if os.path.isfile(filename):
            im = Image.open(filename)
            im.load()
            im = np.asarray(im, dtype=np.float32)
            im = np.transpose(im, (2, 0, 1))
            image.append(im)
            print(image[-1].shape)  # index the tile just appended
    print(len(image))

    # Seed the first column of the canvas...
    for i in range(1, x):
        grid[:, i * 256:(i + 1) * 256, 0:256] = image[i]
        print(i)
    # ...and the first row with the remaining tiles (i leaks from the loop above).
    for j in range(i, 16):
        grid[:, 0:256, (j - i) * 256:(j - i + 1) * 256] = image[j]

    # Slide a 256x256 window over the canvas in 128-pixel steps; the three
    # already-filled 128x128 quadrants condition the generator, which paints
    # the bottom-right quadrant.
    for j in range(128, y * 256 - 128, 128):
        for i in range(128, x * 256 - 128, 128):
            real = grid[:, i:i + 256, j:j + 256]
            real1 = real[:, :size, :size]   # top-left
            real2 = real[:, size:, :size]   # bottom-left
            real3 = real[:, :size, size:]   # top-right
            real1 = (real1.astype(np.float32) - 127.5) / 127.5
            real2 = (real2.astype(np.float32) - 127.5) / 127.5
            real3 = (real3.astype(np.float32) - 127.5) / 127.5
            latents = np.random.randn(3, 128, 128)
            left = np.concatenate((real1, real2), axis=1)
            print('left:' + str(left.shape))
            right = np.concatenate((real3, latents), axis=1)
            lat_and_cond = np.concatenate((left, right), axis=2)
            lat_and_cond = lat_and_cond[np.newaxis]
            fake_images_out_small = Gs.get_output_for(lat_and_cond, is_training=False)
            fake_images_out_small = (fake_images_out_small.eval() * 127.5) + 127.5
            print(fake_images_out_small.shape)
            fake_images_out_small = fake_images_out_small[0, :, :, :]
            grid[:, i + 128:i + 256, j + 128:j + 256] = fake_images_out_small

    images = grid[np.newaxis]
    misc.save_image_grid(images, os.path.join(result_subdir, 'grid.png'),
                         [0, 255], grid_size)
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()
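
# The cond2 variant above packs three known 128x128 quadrants plus a noise
# quadrant into one 256x256 conditioning tensor. The same packing as a
# standalone sketch (pure numpy; `window` is a CHW array already scaled to
# [-1, 1] and `z` is a (3, 128, 128) noise block; this helper is illustrative,
# not part of the original codebase):
def pack_conditioning(window, z, size=128):
    top_left = window[:, :size, :size]
    bottom_left = window[:, size:, :size]
    top_right = window[:, :size, size:]
    left = np.concatenate((top_left, bottom_left), axis=1)    # left column
    right = np.concatenate((top_right, z), axis=1)            # right column
    return np.concatenate((left, right), axis=2)[np.newaxis]  # add batch dim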

def generate_fake_interpolate_midle_images(run_id, snapshot=None, grid_size=[1, 1],
                                           num_pngs=1, image_shrink=1, png_prefix=None,
                                           random_seed=1000, minibatch_size=8,
                                           middle_img=10):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)

    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)
    for png_idx in range(num_pngs):
        latents = misc.random_latents(middle_img + 2, Gs, random_state=random_state)
        from_to_tensor = latents[middle_img + 1] - latents[0]
        from_z = latents[0]
        counter = 0
        # Overwrite the batch with evenly spaced points along the line
        # from_z + alpha * (to_z - from_z), alpha in [-0.5, 0.5].
        for alpha in np.linspace(-0.5, 0.5, middle_img + 2):
            print('alpha: ', alpha, 'counter= ', counter)
            latents[counter] = from_z + alpha * from_to_tensor
            counter += 1
        labels = np.zeros([latents.shape[0], 0], np.float32)
        images = Gs.run(latents, labels, minibatch_size=minibatch_size,
                        num_gpus=config.num_gpus, out_mul=127.5, out_add=127.5,
                        out_shrink=image_shrink, out_dtype=np.uint8)
        grid_size_1 = [middle_img + 1, 1]
        misc.save_image_grid(images[1:, :, :, :],
                             os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx)),
                             [0, 255], grid_size_1)
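
# The loop above is a straight-line interpolation in latent space. The same
# math as a standalone helper (pure numpy; an illustrative sketch, not part of
# the original codebase):
def lerp_latents(z_start, z_end, num_steps):
    """Latents evenly spaced along the line through z_start and z_end."""
    direction = z_end - z_start
    return np.stack([z_start + alpha * direction
                     for alpha in np.linspace(-0.5, 0.5, num_steps)])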

def find_latent_with_query_image(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                                 image_shrink=1, png_prefix=None, random_seed=4123,
                                 minibatch_size=8):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)

    # Create query image - tensorflow constant
    query_image = cv2.imread('../../data/ACDC/training/patient001/cardiac_cycles/0/0.png')
    query_image = cv2.resize(query_image, (256, 256))
    print('Saving query image to "%s"...' % result_subdir)
    cv2.imwrite(result_subdir + '/query_image.png', query_image)
    query_image = query_image.transpose(2, 0, 1)
    query_image = query_image[np.newaxis]
    x = tf.constant(query_image, dtype=tf.float32, name='query_image')

    # Create G(z) - tensorflow variable and label
    latent = misc.random_latents(np.prod(grid_size), Gs, random_state=random_state)
    initial = tf.constant(latent, dtype=tf.float32)
    z = tf.Variable(initial_value=initial, dtype=tf.float32, name='latent_space')
    label = np.zeros([latent.shape[0], 5], np.float32)
    label[:, 4] = 1  # | 0 -> NOR | 1 -> DCM | 2 -> HCM | 3 -> MINF | 4 -> RV |
    gz = Gs.run(latent, label, minibatch_size=minibatch_size, num_gpus=config.num_gpus,
                out_mul=127.5, out_add=127.5, out_shrink=image_shrink, out_dtype=np.float32)
    gz = tf.Variable(gz, dtype=tf.float32)

    # Define a loss function
    residual_loss = tf.losses.absolute_difference(x, gz)

    # Define an optimizer
    train_op = tf.train.AdamOptimizer(learning_rate=0.01).minimize(residual_loss)

    zs, gzs, step = [], [], 1
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        _, loss_value = sess.run([train_op, residual_loss])
        while loss_value > 2e-04 and step <= 50000:
            _, loss_value = sess.run([train_op, residual_loss])
            step += 1
            if step % 10000 == 0:
                print('Step {}, Loss value: {}'.format(step, loss_value))
                gzs.append(sess.run(gz))
                zs.append(sess.run(z))

    for png_idx, image in enumerate(gzs):
        misc.save_image_grid(image,
                             os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx)),
                             [0, 255], grid_size)
    np.save(result_subdir + '/zs.npy', np.asarray(zs))
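
# Note that gz above is a free tf.Variable initialized from a single forward
# pass, so the optimizer can move gz toward x directly, independent of z. A
# sketch of the variant that keeps the generator in the graph and optimizes z
# alone (it assumes Gs.get_output_for accepts latent and label tensors, as the
# conditional snippets in this file suggest; labels must be a tf tensor):
def build_latent_search(Gs, x, z, labels):
    """Return (loss, train_op) minimizing |x - G(z)| with respect to z only."""
    gz = Gs.get_output_for(z, labels, is_training=False) * 127.5 + 127.5
    loss = tf.losses.absolute_difference(x, gz)
    train_op = tf.train.AdamOptimizer(learning_rate=0.01).minimize(loss, var_list=[z])
    return loss, train_op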

def generate_fake_images(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                         image_shrink=1, png_prefix=None, random_seed=1000,
                         minibatch_size=8):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)

    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)
    for png_idx in range(num_pngs):
        print('Generating png %d / %d...' % (png_idx, num_pngs))
        latents = misc.random_latents(np.prod(grid_size), Gs, random_state=random_state)
        labels = np.zeros([latents.shape[0], 0], np.float32)
        images = Gs.run(latents, labels, minibatch_size=minibatch_size,
                        num_gpus=config.num_gpus, out_mul=127.5, out_add=127.5,
                        out_shrink=image_shrink, out_dtype=np.uint8)
        misc.save_image_grid(images,
                             os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx)),
                             [0, 255], grid_size)
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()

def generate_fake_images_cond(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                              image_shrink=1, png_prefix=None, random_seed=1000,
                              minibatch_size=1):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)
    training_set = dataset.load_dataset(data_dir=config.data_dir, verbose=True,
                                        **config.dataset)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    real, label = training_set.get_minibatch_np(num_pngs)
    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)

    size = 128
    for png_idx in range(num_pngs):
        # Split each real 256x256 image into quadrants; three condition the
        # generator, the fourth is replaced by noise.
        real1 = real[png_idx, :, :size, :size]   # top-left
        real2 = real[png_idx, :, size:, :size]   # bottom-left
        real3 = real[png_idx, :, :size, size:]   # top-right
        real1 = (real1.astype(np.float32) - 127.5) / 127.5
        real2 = (real2.astype(np.float32) - 127.5) / 127.5
        real3 = (real3.astype(np.float32) - 127.5) / 127.5
        latents = np.random.randn(3, 128, 128)
        left = np.concatenate((real1, real2), axis=1)
        print('left:' + str(left.shape))
        right = np.concatenate((real3, latents), axis=1)
        lat_and_cond = np.concatenate((left, right), axis=2)
        lat_and_cond = lat_and_cond[np.newaxis]
        fake_images_out_small = Gs.get_output_for(lat_and_cond, is_training=False)
        fake_images_out_small = (fake_images_out_small.eval() * 127.5) + 127.5
        print(fake_images_out_small.shape)
        fake_images_out_small = fake_images_out_small[0, :, :, :]

        # Undo the normalization on the real quadrants and stitch the full image.
        real1 = (real1.astype(np.float32) * 127.5) + 127.5
        real2 = (real2.astype(np.float32) * 127.5) + 127.5
        real3 = (real3.astype(np.float32) * 127.5) + 127.5
        fake_image_out_right = np.concatenate((real3, fake_images_out_small), axis=1)
        fake_image_out_left = np.concatenate((real1, real2), axis=1)
        images = np.concatenate((fake_image_out_left, fake_image_out_right), axis=2)
        images = images[np.newaxis]
        misc.save_image_grid(images,
                             os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx)),
                             [0, 255], grid_size)
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()
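
# The conditional snippets repeat the same pixel <-> [-1, 1] mapping inline; a
# small helper pair capturing that convention (an illustrative sketch, not part
# of the original codebase):
def to_unit_range(img):
    """Map [0, 255] pixel values to the [-1, 1] range the generator consumes."""
    return (np.asarray(img, np.float32) - 127.5) / 127.5

def to_pixel_range(img):
    """Map [-1, 1] generator output back to [0, 255] pixel values."""
    return np.asarray(img, np.float32) * 127.5 + 127.5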

def generate_fake_images(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                         image_shrink=1, subdir=None, random_seed=1000,
                         minibatch_size=8):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if subdir is None:
        subdir = misc.get_id_string_for_network_pkl(network_pkl)
    random_state = np.random.RandomState(random_seed)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)

    result_subdir = 'results/images/' + subdir
    if not os.path.exists(result_subdir):
        os.makedirs(result_subdir)
    for png_idx in range(num_pngs):
        print('Generating png %d / %d...' % (png_idx, num_pngs))
        latents = misc.random_latents(np.prod(grid_size), Gs, random_state=random_state)  # was an unqualified random_latents
        labels = np.zeros([latents.shape[0], 0], np.float32)
        images = Gs.run(latents, labels, minibatch_size=minibatch_size, num_gpus=1,
                        out_mul=127.5, out_add=127.5, out_shrink=image_shrink,
                        out_dtype=np.uint8, randomize_noise=False)
        misc.save_image_grid(images, os.path.join(result_subdir, '%06d.png' % png_idx),
                             [0, 255], grid_size)
        np.save(result_subdir + '/' + '%06d' % png_idx, latents)  # keep latents next to the PNG
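
# Because the variant above stores each latent batch next to its PNG, any image
# can be regenerated later. A reload sketch (the path is hypothetical; Gs must
# come from the same network pickle, and the Gs.run arguments mirror the call
# above):
def regenerate_from_saved_latents(Gs, latents_path, minibatch_size=8):
    latents = np.load(latents_path)  # e.g. 'results/images/<subdir>/000000.npy'
    labels = np.zeros([latents.shape[0], 0], np.float32)
    return Gs.run(latents, labels, minibatch_size=minibatch_size, num_gpus=1,
                  out_mul=127.5, out_add=127.5, out_dtype=np.uint8,
                  randomize_noise=False)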

def generate_fake_images(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                         image_shrink=1, png_prefix=None, random_seed=1000,
                         minibatch_size=8):
    # Toggle which inputs stay fixed across the generated PNGs.
    embeddings_constant = False
    labels_constant = False
    latents_constant = False
    idx = random.randint(0, 56880)
    df = pandas.read_csv('datasets/50k_sorted_tf/50k_index_sorted.csv')
    print('embeddings_constant : ' + str(embeddings_constant))
    print('labels_constant : ' + str(labels_constant))
    print('latents_constant : ' + str(latents_constant))
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    result_subdir = misc.create_result_subdir(config.result_dir + '/' + run_id, config.desc)

    if latents_constant:
        latents = misc.random_latents(np.prod(grid_size), Gs, random_state=None)
    embeddings = np.load('datasets/50k_sorted_tf/sum_embedding_title.embeddings')
    embeddings = embeddings.astype('float32')
    labels = np.load('datasets/50k_sorted_tf/sum_embedding_category_average.labels')
    labels = labels.astype('float32')

    name1 = ''
    if labels_constant:
        label = labels[idx]
        name1 = name1 + ' ' + df.at[idx, 'category1']
        label = label.reshape(1, label.shape[0])
    if embeddings_constant:
        embedding = embeddings[idx]
        title = df.at[idx, 'title']
        name1 = name1 + ' ' + title[:10]
        embedding = embedding.reshape(1, embedding.shape[0])

    for png_idx in range(num_pngs):
        name = name1
        print('Generating png %d / %d...' % (png_idx, num_pngs))
        rand = random.randint(0, 56880)
        if not latents_constant:
            latents = misc.random_latents(np.prod(grid_size), Gs, random_state=random_state)
        if not labels_constant:
            label = labels[rand]
            label = label.reshape(1, label.shape[0])
            name = name + ' ' + df.at[rand, 'category1']
        if not embeddings_constant:
            embedding = embeddings[rand]
            title = df.at[rand, 'title']
            name = name + ' ' + title[:10]
            embedding = embedding.reshape(1, embedding.shape[0])
        images = Gs.run(latents, label, embedding, minibatch_size=minibatch_size,
                        num_gpus=config.num_gpus, out_mul=127.5, out_add=127.5,
                        out_shrink=image_shrink, out_dtype=np.uint8)
        misc.save_image_grid(images,
                             os.path.join(result_subdir, '%s%06d.png' % (name, png_idx)),
                             [0, 255], grid_size)
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()

def find_dir_latent_with_query_image(run_id, snapshot=None, grid_size=[1, 1], num_pngs=1,
                                     image_shrink=1, png_prefix=None, random_seed=4123,
                                     minibatch_size=8,
                                     dir_path='../../data/ACDC/latents/cleaned_testing/'):
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)
    replicate_folder_structure(dir_path, result_subdir + '/')

    train_patients = sorted_nicely(glob.glob(dir_path + '*'))
    for patient in train_patients:
        cardiac_cycles = sorted_nicely(glob.glob(patient + '/*/*/*.png'))
        cfg = open(patient + '/Info.cfg')
        label = condition_to_onehot(cfg.readlines()[2][7:])
        cont = 0
        for cycle in cardiac_cycles:
            # Get folder containing the image
            supfolder = sup_folder(cycle)
            latent_subdir = result_subdir + '/' + supfolder

            # Create query image - tensorflow constant
            query_image = cv2.imread(cycle)  # read frame
            query_image = cv2.resize(query_image, (256, 256))
            query_image = query_image.transpose(2, 0, 1)
            query_image = query_image[np.newaxis]
            x = tf.constant(query_image, dtype=tf.float32, name='query_image')

            # Create G(z) - tensorflow variable and label
            latent = misc.random_latents(np.prod(grid_size), Gs, random_state=random_state)
            initial = tf.constant(latent, dtype=tf.float32)
            z = tf.Variable(initial_value=initial, dtype=tf.float32, name='latent_space')
            gz = Gs.run(latent, label, minibatch_size=minibatch_size, num_gpus=config.num_gpus,
                        out_mul=127.5, out_add=127.5, out_shrink=image_shrink,
                        out_dtype=np.float32)
            gz = tf.Variable(gz, dtype=tf.float32)

            # Define a loss function
            residual_loss = tf.losses.absolute_difference(x, gz)

            # Define an optimizer
            train_op = tf.train.AdamOptimizer(learning_rate=0.1).minimize(residual_loss)

            zs, gzs, step = [], [], 1
            with tf.Session() as sess:
                sess.run(tf.global_variables_initializer())
                _, loss_value = sess.run([train_op, residual_loss])
                while loss_value > 2e-04 and step <= 5000:
                    _, loss_value = sess.run([train_op, residual_loss])
                    step += 1
                    if step % 1000 == 0:
                        print('Step {}, Loss value: {}'.format(step, loss_value))
                        gzs.append(sess.run(gz))
                        zs.append(sess.run(z))

            # Save the last reconstruction, its latent vector, and its label.
            print('Image saved at {}'.format(os.path.join(latent_subdir, '%02d.png' % cont)))
            misc.save_image_grid(gzs[-1], os.path.join(latent_subdir, '%02d.png' % cont),
                                 [0, 255], grid_size)
            print('Latent vectors saved at {}'.format(os.path.join(latent_subdir, 'latent_%02d.npy' % cont)))
            np.save(os.path.join(latent_subdir, 'latent_%02d.npy' % cont), zs[-1])
            print('Labels saved at {}'.format(os.path.join(latent_subdir, 'label_%02d.npy' % cont)))
            np.save(os.path.join(latent_subdir, 'label_%02d.npy' % cont), label)
            cont += 1
        cfg.close()
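
# condition_to_onehot above is an external helper. A plausible sketch of it,
# assuming the five-class ACDC mapping noted in find_latent_with_query_image
# (0 -> NOR, 1 -> DCM, 2 -> HCM, 3 -> MINF, 4 -> RV) and a one-row label; the
# real helper may differ:
def condition_to_onehot_sketch(condition):
    classes = ['NOR', 'DCM', 'HCM', 'MINF', 'RV']
    label = np.zeros([1, len(classes)], np.float32)
    label[0, classes.index(condition.strip())] = 1.0
    return label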