def on_epoch_end(self, epoch, logs=None):
    """Dump per-layer weight visualizations at the end of an epoch.

    Converts the model's weights to images via
    WeightWriter.model_weights_to_images and writes each one to
    self.dir as "<layer_name>.<zero-padded epoch>.png".
    """
    rendered = WeightWriter.model_weights_to_images(self.model)
    for layer_name, img in rendered:
        filename = ".".join([layer_name, str(epoch).zfill(4), "png"])
        save_image(join(self.dir, filename), img)
def display_current_results(self, visuals, size, epoch, iteration):
    """Publish the current visuals to TensorBoard and/or a static HTML page.

    Args:
        visuals: mapping of label -> HWC image array (or list of arrays).
        size, epoch, iteration: identifiers baked into every image name.
    """
    # Common stem shared by every image written for this step.
    stem = "size-{}-epoch-{}-iter-{}".format(size, epoch, iteration)

    if self.tboard:
        img_summaries = []  # NOTE(review): unused in the original as well
        for label, image_numpy in visuals.items():
            self.writer.add_image("{}-{}.jpg".format(stem, label),
                                  image_numpy, dataformats='HWC')

    if self.use_html:
        # Persist each visual (or each member of a list of visuals) to disk.
        for label, image_numpy in visuals.items():
            if isinstance(image_numpy, list):
                for i, member in enumerate(image_numpy):
                    img_path = os.path.join(
                        self.img_dir, "{}-{}_{}.jpg".format(stem, label, i))
                    misc.save_image(member, img_path)
            else:
                img_path = os.path.join(
                    self.img_dir, "{}-{}.jpg".format(stem, label))
                misc.save_image(image_numpy, img_path)

        # Rebuild the webpage from everything currently in img_dir,
        # newest (reverse-sorted) first.
        webpage = html.HTML(self.web_dir,
                            "Experiment name = {}".format(self.name),
                            refresh=30)
        webpage.add_header("Training process visulization")
        ims, txts, links = [], [], []
        for image in sorted(os.listdir(self.img_dir), reverse=True):
            ims.append(image)
            links.append(image)
            base = image.split('.')[0]
            caption = base.split('_')[0].split('-')[-1]
            caption += ": " + base.replace('_', '@')
            txts.append(caption)

        # Small galleries fit on one row; larger ones are split in two.
        if len(ims) < 10:
            webpage.add_images(ims, txts, links, width=self.win_size)
        else:
            half = int(round(len(ims) / 2.0))
            webpage.add_images(ims[:half], txts[:half], links[:half],
                               width=self.win_size)
            webpage.add_images(ims[half:], txts[half:], links[half:],
                               width=self.win_size)
        webpage.save()
def encode_patchwork(inp, out, msg):
    """Hide a text message in an image using the patchwork scheme.

    Draws two disjoint random pixel-position streams A and B, embeds *msg*
    with encode_msg_with_patchwork, and writes the result to *out*.

    Args:
        inp: path of an existing input image file.
        out: output image path. jpg/jpeg is rejected because lossy
            compression would destroy the steganographic message.
        msg: the text message to hide.
    """
    assert os.path.isfile(inp), '%s is not a file.' % inp
    img = read_img(inp)
    A = get_random_pos(len(msg), img)
    B = get_random_pos(len(msg), img, A)  # pass A so B avoids A's positions
    # Fixed: the original used Python 2 print statements, a SyntaxError on
    # Python 3; single-argument print() is valid on both interpreters.
    print("A: %s" % json.dumps(A))
    print("B: %s" % json.dumps(B))
    # While writing a jpg image, compression destroys the steganographic
    # message, so reject lossy extensions up front.
    ext = os.path.splitext(out)[1].lower()
    assert ext != '.jpeg' and ext != '.jpg', 'jpg/jpeg is currently not a valid extension for output images.'
    save_image(out, encode_msg_with_patchwork(img, msg, A, B))
def encode_patchwork(inp, out, msg):
    """Hide a text message in an image using the patchwork scheme.

    NOTE(review): this appears to be an exact duplicate of another
    encode_patchwork definition in this file; if both live in the same
    module, the later definition shadows the earlier — confirm and
    consider removing one.

    Args:
        inp: path of an existing input image file.
        out: output image path. jpg/jpeg is rejected because lossy
            compression would destroy the steganographic message.
        msg: the text message to hide.
    """
    assert os.path.isfile(inp), '%s is not a file.' % inp
    img = read_img(inp)
    A = get_random_pos(len(msg), img)
    B = get_random_pos(len(msg), img, A)  # pass A so B avoids A's positions
    # Fixed: the original used Python 2 print statements, a SyntaxError on
    # Python 3; single-argument print() is valid on both interpreters.
    print("A: %s" % json.dumps(A))
    print("B: %s" % json.dumps(B))
    # While writing a jpg image, compression destroys the steganographic
    # message, so reject lossy extensions up front.
    ext = os.path.splitext(out)[1].lower()
    assert ext != '.jpeg' and ext != '.jpg', 'jpg/jpeg is currently not a valid extension for output images.'
    save_image(out, encode_msg_with_patchwork(img, msg, A, B))
def on_epoch_end(self, epoch, logs=None):
    """Run prediction over self.count batches and save inputs/outputs as PNGs.

    Images go to <dir>/<zero-padded epoch>/ as "<i>_<j>_in.png" and
    "<i>_<j>_pred[_<idx>].png".
    NOTE(review): the original docstring also promised a scores.csv, but
    nothing here writes one — confirm whether that lives elsewhere.
    """
    epoch_path = join(self.dir, str(epoch).zfill(4))
    try:
        makedirs(epoch_path)
    except OSError:
        # Fixed: was `except IOError` — os.makedirs raises OSError when the
        # directory already exists (IOError would not catch it on Python 2).
        pass
    for i in range(self.count):
        X, _ = next(self.data)
        # Save each input image (first channel only).
        for j in range(X.shape[0]):
            data_path = join(epoch_path, "%04i_%04i_in.png" % (i, j))
            save_image(data_path, X[j, :, :, 0])
        Y = self.model.predict(X, batch_size=X.shape[0])
        # For multiple outputs, assume every output is image-shaped and
        # save each under its output index.
        if isinstance(Y, list):  # idiomatic replacement for type(Y) is type([])
            for idx, y in enumerate(Y):
                for j in range(y.shape[0]):
                    pred_path = join(
                        epoch_path, "%04i_%04i_pred_%02i.png" % (i, j, idx))
                    save_image(pred_path, y[j, :, :, 0])
        else:
            for j in range(Y.shape[0]):
                pred_path = join(epoch_path, "%04i_%04i_pred.png" % (i, j))
                save_image(pred_path, Y[j, :, :, 0])
def write_img(path, img_array):
    """Write *img_array* to *path* via save_image.

    Fixed: the original called img_array.reshape(img_array.shape), which
    reshapes the array to its own shape — a no-op — before saving.
    """
    save_image(path, img_array)
def generate_fake_images(run_id, snapshot=None, grid_size=[1, 1], batch_size=8,
                         num_pngs=1, image_shrink=1, png_prefix=None,
                         random_seed=1000, minibatch_size=8):
    # NOTE(review): grid_size=[1, 1] is a mutable default argument — safe
    # only as long as no caller mutates it; consider a tuple.
    """Sample images from a trained generator and export them by channel layout.

    Loads the network pickle for *run_id*/*snapshot*, draws random latents,
    and for each generated sample exports, depending on channel count:
    3 ch -> raw pickle; 6 ch -> shape pickle + RGB png; 9 ch -> textured
    LSFM mesh, normals png, shape png and mesh pickle.
    """
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)  # reproducible sampling
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    # NOTE(review): hard-coded absolute paths to a specific user's home
    # directory — these should be configurable.
    lsfm_model = m3io.import_lsfm_model(
        '/home/baris/Projects/faceganhd/models/all_all_all.mat')
    lsfm_tcoords = \
        mio.import_pickle('/home/baris/Projects/team members/stelios/UV_spaces_V2/UV_dicts/full_face/512_UV_dict.pkl')[
            'tcoords']
    # NOTE(review): lsfm_params is appended to below but never written out.
    lsfm_params = []
    result_subdir = misc.create_result_subdir(config.result_dir, config.desc)
    for png_idx in range(int(num_pngs / batch_size)):
        start = time.time()
        print('Generating png %d-%d / %d... in ' % (png_idx * batch_size,
                                                    (png_idx + 1) * batch_size,
                                                    num_pngs), end='')
        latents = misc.random_latents(np.prod(grid_size) * batch_size, Gs,
                                      random_state=random_state)
        # Zero-width label vector: this model variant is unconditional.
        labels = np.zeros([latents.shape[0], 0], np.float32)
        images = Gs.run(latents, labels, minibatch_size=minibatch_size,
                        num_gpus=config.num_gpus, out_shrink=image_shrink)
        for i in range(batch_size):
            if images.shape[1] == 3:
                # Plain 3-channel output: pickle only.
                mio.export_pickle(
                    images[i],
                    os.path.join(
                        result_subdir,
                        '%s%06d.pkl' % (png_prefix, png_idx * batch_size + i)))
                # misc.save_image(images[i], os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx*batch_size+i)), [0,255], grid_size)
            elif images.shape[1] == 6:
                # Channels 3:6 pickled (presumably shape data — confirm),
                # channels 0:3 saved as an RGB png in [-1, 1] range.
                mio.export_pickle(images[i][3:6], os.path.join(
                    result_subdir,
                    '%s%06d.pkl' % (png_prefix, png_idx * batch_size + i)),
                    overwrite=True)
                misc.save_image(
                    images[i][0:3],
                    os.path.join(
                        result_subdir,
                        '%s%06d.png' % (png_prefix, png_idx * batch_size + i)),
                    [-1, 1], grid_size)
            elif images.shape[1] == 9:
                # 9-channel layout: 0:3 texture, 3:6 UV shape, 6:9 normals.
                texture = Image(np.clip(images[i, 0:3] / 2 + 0.5, 0, 1))
                mesh_raw = from_UV_2_3D(Image(images[i, 3:6]))
                normals = images[i, 6:9]
                # Min-max normalize normals into [0, 1] for export as an image.
                normals_norm = (normals - normals.min()) / (normals.max() - normals.min())
                mesh = lsfm_model.reconstruct(mesh_raw)
                lsfm_params.append(lsfm_model.project(mesh_raw))
                t_mesh = TexturedTriMesh(mesh.points, lsfm_tcoords.points,
                                         texture, mesh.trilist)
                # NOTE(review): filenames here index with minibatch_size while
                # the loop advances by batch_size — identical only when the
                # two are equal (both default to 8); confirm intent.
                m3io.export_textured_mesh(
                    t_mesh,
                    os.path.join(result_subdir,
                                 '%06d.obj' % (png_idx * minibatch_size + i)),
                    texture_extension='.png')
                mio.export_image(
                    Image(normals_norm),
                    os.path.join(
                        result_subdir,
                        '%06d_nor.png' % (png_idx * minibatch_size + i)))
                shape = images[i, 3:6]
                # Same min-max normalization for the shape channels.
                shape_norm = (shape - shape.min()) / (shape.max() - shape.min())
                mio.export_image(
                    Image(shape_norm),
                    os.path.join(
                        result_subdir,
                        '%06d_shp.png' % (png_idx * minibatch_size + i)))
                mio.export_pickle(
                    t_mesh,
                    os.path.join(result_subdir,
                                 '%06d.pkl' % (png_idx * minibatch_size + i)))
        print('%0.2f seconds' % (time.time() - start))
    # Marker file signalling that generation finished.
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()
def generate_fake_images(run_id, snapshot=None, grid_size=[1, 1], batch_size=8,
                         num_pngs=1, image_shrink=1, png_prefix=None,
                         random_seed=1000, minibatch_size=8):
    # NOTE(review): grid_size=[1, 1] is a mutable default argument — safe
    # only as long as no caller mutates it; consider a tuple.
    """Sample images from a trained generator and export them by channel layout.

    Loads the network pickle for *run_id*/*snapshot*, draws random latents
    with a 7-dimensional label vector of zeros, and for each sample exports:
    3 ch -> raw pickle; 6 ch -> shape pickle + RGB png; 9 ch -> shape and
    normals pickles + RGB png.
    """
    network_pkl = misc.locate_network_pkl(run_id, snapshot)
    if png_prefix is None:
        png_prefix = misc.get_id_string_for_network_pkl(network_pkl) + '-'
    random_state = np.random.RandomState(random_seed)  # reproducible sampling
    print('Loading network from "%s"...' % network_pkl)
    G, D, Gs = misc.load_network_pkl(run_id, snapshot)
    result_subdir = misc.create_result_subdir(config_test.result_dir,
                                              config_test.desc)
    for png_idx in range(int(num_pngs / batch_size)):
        start = time.time()
        print('Generating png %d-%d / %d... in ' % (png_idx * batch_size,
                                                    (png_idx + 1) * batch_size,
                                                    num_pngs), end='')
        latents = misc.random_latents(np.prod(grid_size) * batch_size, Gs,
                                      random_state=random_state)
        # 7-wide all-zero label vector — presumably a conditional model with
        # 7 label dims; confirm against the network definition.
        labels = np.zeros([latents.shape[0], 7], np.float32)
        images = Gs.run(latents, labels, minibatch_size=minibatch_size,
                        num_gpus=config_test.num_gpus, out_shrink=image_shrink)
        for i in range(batch_size):
            if images.shape[1] == 3:
                # Plain 3-channel output: pickle only.
                mio.export_pickle(
                    images[i],
                    os.path.join(
                        result_subdir,
                        '%s%06d.pkl' % (png_prefix, png_idx * batch_size + i)))
                # misc.save_image(images[i], os.path.join(result_subdir, '%s%06d.png' % (png_prefix, png_idx*batch_size+i)), [0,255], grid_size)
            elif images.shape[1] == 6:
                # Channels 3:6 pickled, channels 0:3 saved as RGB in [-1, 1].
                mio.export_pickle(images[i][3:6], os.path.join(
                    result_subdir,
                    '%s%06d.pkl' % (png_prefix, png_idx * batch_size + i)),
                    overwrite=True)
                misc.save_image(
                    images[i][0:3],
                    os.path.join(
                        result_subdir,
                        '%s%06d.png' % (png_prefix, png_idx * batch_size + i)),
                    [-1, 1], grid_size)
            elif images.shape[1] == 9:
                # 9-channel layout: 3:6 shape, 6:9 normals, 0:3 RGB image.
                mio.export_pickle(images[i][3:6], os.path.join(
                    result_subdir,
                    '%s%06d_shp.pkl' % (png_prefix, png_idx * batch_size + i)),
                    overwrite=True)
                mio.export_pickle(images[i][6:9], os.path.join(
                    result_subdir,
                    '%s%06d_nor.pkl' % (png_prefix, png_idx * batch_size + i)),
                    overwrite=True)
                misc.save_image(
                    images[i][0:3],
                    os.path.join(
                        result_subdir,
                        '%s%06d.png' % (png_prefix, png_idx * batch_size + i)),
                    [-1, 1], grid_size)
        print('%0.2f seconds' % (time.time() - start))
    # Marker file signalling that generation finished.
    open(os.path.join(result_subdir, '_done.txt'), 'wt').close()