def func(trainer):
    # Trainer extension body: renders `number` sample images with the
    # current generator state and writes them (and optionally their
    # latents) to `directory`. All free names come from the enclosing
    # closure.
    done = 0
    # Decide once, for the whole batch run, whether style mixing is used.
    mixing = mix > random()
    while done < number:
        count = min(number - done, batch)
        latent = generator.generate_latent(count)
        secondary = generator.generate_latent(count) if mixing else None
        images = generator(latent, stage, trainer.updater.alpha, secondary)
        # Pull results off the device before saving.
        latent.to_cpu()
        images.to_cpu()
        for idx in range(count):
            # File stem encodes stage, iteration, alpha and a 1-based index.
            stem = f"{stage}_{trainer.updater.iteration}_{trainer.updater.alpha:.3f}_{done + idx + 1}"
            img_path = filepath(directory, stem, "png")
            if not force:
                img_path = altfilepath(img_path)
            save_image(images.array[idx], img_path)
            if save_latent:
                npy_path = filepath(directory, stem, "npy")
                if not force:
                    npy_path = altfilepath(npy_path)
                save_array(latent.array[idx], npy_path)
        done += count
def func(trainer):
    # Trainer extension body: snapshots the three optimizer states as
    # HDF5 files named `<prefix>_<stage>_<iteration>.hdf5`. The
    # optimizers and path helpers come from the enclosing closure.
    snapshots = [
        ("mopt", mapper_optimizer),
        ("gopt", generator_optimizer),
        ("dopt", discriminator_optimizer),
    ]
    for prefix, optimizer in snapshots:
        target = filepath(directory, f"{prefix}_{stage}_{trainer.updater.iteration}", "hdf5")
        # Unless overwriting is forced, divert to a non-clobbering path.
        if not force:
            target = altfilepath(target)
        serializers.save_hdf5(target, optimizer)
def func(trainer):
    # Trainer extension body: snapshots the three model states as HDF5.
    # Models are moved to CPU for serialization and moved back to the
    # GPU afterwards when a device is in use.
    generator.to_cpu()
    averaged_generator.to_cpu()
    discriminator.to_cpu()
    snapshots = [
        ("gen", generator),
        ("avgen", averaged_generator),
        ("dis", discriminator),
    ]
    for prefix, model in snapshots:
        target = filepath(directory, f"{prefix}_{stage}_{trainer.updater.iteration}", "hdf5")
        # Unless overwriting is forced, divert to a non-clobbering path.
        if not force:
            target = altfilepath(target)
        serializers.save_hdf5(target, model)
    if device >= 0:
        generator.to_gpu(device)
        averaged_generator.to_gpu(device)
        discriminator.to_gpu(device)
def __init__(self, directory, size, preload=False):
    """Collect image files under `directory` (recursively) as a dataset.

    Args:
        directory: Root folder searched with a recursive glob.
        size: Target image size, forwarded to `load_image` when preloading.
        preload: When True, decode every image up front into `self.arrays`.
    """
    super().__init__()
    self.size = size
    self.preload = preload
    self.images = []
    # One recursive glob per recognized extension; `escape` guards
    # against glob metacharacters in the directory name.
    for extension in ["png", "jpg", "jpeg", "gif", "bmp", "tif", "tiff"]:
        pattern = filepath(escape(directory), join("**", "*"), extension)
        matches = glob(pattern, recursive=True)
        self.images.extend(entry for entry in matches if isfile(entry))
    if preload:
        self.arrays = [load_image(image, size) for image in self.images]
# NOTE(review): fragment of a larger script — `gen`, `z`, `batch`, `stage`,
# `alpha`, `mix_stage`, `psi` and the style settings are defined outside
# this view; the matching `gd.write_pdf(pd_path)` presumably follows below.
# Run the generator once so its computational graph can be traced.
mix = gen.generate_latent(batch)
mean = gen.calculate_mean_w()
i = gen(z, stage, alpha, mix, mix_stage, psi, mean)
dg = build_computational_graph([i], variable_style=gvarstyle, function_style=gfuncstyle).dump()
# Build the discriminator's graph
dis = Discriminator(channels, max_stage)
# Wrap in a fresh Variable so the discriminator graph is not chained to
# the generator's.
y = dis(Variable(i.array), stage, alpha)
dd = build_computational_graph([y], variable_style=dvarstyle, function_style=dfuncstyle).dump()
# Save the graphs as dot files
dg_path = filepath(path, filename_g, "dot")
dd_path = filepath(path, filename_d, "dot")
with open(dg_path, "w") as f:
    f.write(dg)
print(f"Saved: {dg_path}")
with open(dd_path, "w") as f:
    f.write(dd)
print(f"Saved: {dd_path}")
# Save the graphs as PDFs
pg_path = filepath(path, filename_g, "pdf")
pd_path = filepath(path, filename_d, "pdf")
# graph_from_dot_data returns a list; take the single parsed graph.
gg = graph_from_dot_data(dg)[0]
gd = graph_from_dot_data(dd)[0]
gg.write_pdf(pg_path)
print(f"Saved: {pg_path}")
updater = StyleGanUpdater( generator, averaged_generator, discriminator, iterator, { "mapper": mapper_optimizer, "generator": generator_optimizer, "discriminator": discriminator_optimizer }, args.device, args.stage, args.mix, args.alpha, args.delta, args.gamma, args.decay, args.lsgan) # Init result directory print("Initializing destination directory...") if args.wipe: rmtree(args.result, ignore_errors=True) mkdirp(args.result) # Dump command-line options path = filepath(args.result, "args_quit" if args.quit else "args", "json") path = path if args.force else altfilepath(path) with open(path, mode="w", encoding="utf-8") as fp: dump(vars(args), fp, indent=2, sort_keys=True) # Define extension to output images in progress def save_middle_images(generator, stage, directory, number, batch, mix, force=True, save_latent=True): @make_extension()
# Load center latent if args.center is not None: print("Loading latent") center = generator.wrap_latent(load_array(args.center)) else: center = None # Init destination folder print("Initializing destination directory") if args.wipe: rmtree(args.directory, ignore_errors=True) mkdirp(args.directory) # Dump command-line options if args.dump_json: path = filepath(args.directory, "args_quit" if args.quit else "args", "json") path = path if args.force else altfilepath(path) with open(path, mode="w", encoding="utf-8") as fp: dump(vars(args), fp, indent=2, sort_keys=True) # Quit mode if args.quit: print("Finished (Quit mode)") exit(0) # Generate images c = 0 mean_w = None if args.psi is None else generator.calculate_mean_w() while c < args.number: n = min(args.number - c, args.batch) z = generator.generate_latent(n, center=center, sd=args.sd)
append = [] # Check the number of latents if (len(prepend) + args.number + len(append) < 2): eprint("More latents required") exit(1) # Init destination folder print("Initializing destination directory") if args.wipe: rmtree(args.directory, ignore_errors=True) mkdirp(args.directory) # Dump command-line options if args.dump_json: path = filepath(args.directory, "args_quit" if args.quit else "args", "json") path = path if args.force else altfilepath(path) with open(path, mode="w", encoding="utf-8") as fp: dump(vars(args), fp, indent=2, sort_keys=True) # Quit mode if args.quit: print("Finished (Quit mode)") exit(0) # Sampling new latents c = 0 new_ws = [] mean_w = None if args.psi is None else generator.calculate_mean_w() while c < args.number: n = min(args.number - c, args.batch)