def start(self):
    if config.evolution.fitness.generator == "FID" or config.stats.calc_fid_score or config.stats.calc_fid_score_best:
        generative_score.initialize_fid(
            self.train_loader, sample_size=config.evolution.fitness.fid_sample_size)

    generators_population = self.evolutionary_algorithm.intialize_population(
        config.gan.generator.population_size, Generator, output_size=self.input_shape)
    discriminators_population = self.evolutionary_algorithm.intialize_population(
        config.gan.discriminator.population_size, Discriminator,
        output_size=1, input_shape=[1] + list(self.input_shape))

    # initial evaluation
    self.evolutionary_algorithm.evaluate_population(
        generators_population.phenotypes(), discriminators_population.phenotypes())

    for generation in tqdm(range(config.evolution.max_generations - 1)):
        self.stats.generate(generators_population, discriminators_population, generation)
        # executes selection, reproduction and replacement to create the next population
        generators_population, discriminators_population = self.evolutionary_algorithm.compute_generation(
            generators_population, discriminators_population)

    # stats for last generation
    self.stats.generate(generators_population, discriminators_population, generation + 1)
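# Note: the initialize_fid call above precomputes statistics of *real* training data
# once, before the evolutionary loop, so later FID evaluations only need statistics
# for generated samples. The helper below is a generic sketch of what such a baseline
# consists of (mean and covariance of Inception activations); it is an illustration,
# not the project's metrics.generative_score module, and the (N, 2048) shape is an
# assumption based on the dims=2048 argument used elsewhere in this code.
import numpy as np

def real_data_statistics(activations):
    """Return the (mu, sigma) pair FID compares generated samples against,
    given an (N, 2048) array of Inception pool3 activations for real images."""
    mu = activations.mean(axis=0)
    sigma = np.cov(activations, rowvar=False)
    return mu, sigma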
def getFIDScore(self):
    self.Gen_network = self.Gen_network.to(self.device)
    self.Gen_network.eval()
    noise = torch.randn(1000, 100, 1, 1, device=self.device)
    generated_images = self.Gen_network(noise).detach()
    self.Gen_network.zero_grad()
    self.Gen_network.cpu()
    torch.cuda.empty_cache()

    # Get FID score for the model:
    global base_fid_statistics, inception_model
    if base_fid_statistics is None and inception_model is None:
        base_fid_statistics, inception_model = generative_score.initialize_fid(
            self.dataloader, sample_size=1000)
    inception_model = tools.cuda(inception_model)
    m1, s1 = fid_score.calculate_activation_statistics(
        generated_images.data.cpu().numpy(), inception_model,
        cuda=tools.is_cuda_available(), dims=2048)
    inception_model.cpu()
    m2, s2 = base_fid_statistics
    ret = fid_score.calculate_frechet_distance(m1, s1, m2, s2)
    torch.cuda.empty_cache()
    return ret
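# For reference, calculate_frechet_distance computes the Frechet distance between two
# Gaussians fitted to Inception activations: ||m1 - m2||^2 + Tr(s1 + s2 - 2*(s1 s2)^(1/2)).
# Below is a minimal, self-contained sketch of that formula using NumPy/SciPy; it
# illustrates the metric and is not the project's metrics.fid implementation.
import numpy as np
from scipy import linalg

def frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
    """Frechet distance between N(mu1, sigma1) and N(mu2, sigma2)."""
    diff = mu1 - mu2
    # Matrix square root of the covariance product; sqrtm may return non-finite
    # values or small imaginary parts for near-singular inputs.
    covmean, _ = linalg.sqrtm(sigma1 @ sigma2, disp=False)
    if not np.isfinite(covmean).all():
        offset = np.eye(sigma1.shape[0]) * eps  # regularize the diagonals
        covmean, _ = linalg.sqrtm((sigma1 + offset) @ (sigma2 + offset), disp=False)
    if np.iscomplexobj(covmean):
        covmean = covmean.real
    return diff @ diff + np.trace(sigma1) + np.trace(sigma2) - 2 * np.trace(covmean)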
def start(self):
    if config.evolution.fitness.generator == "FID" or config.stats.calc_fid_score:
        generative_score.initialize_fid(self.train_loader,
                                        sample_size=config.evolution.fitness.fid_sample_size)

    generators_population, discriminators_population = self.generate_intial_population()

    # initialize best_discriminators and best_generators with random individuals
    best_discriminators = list(np.random.choice(discriminators_population.phenotypes(),
                                                config.evolution.evaluation.best_size, replace=False))
    best_generators = list(np.random.choice(generators_population.phenotypes(),
                                            config.evolution.evaluation.best_size, replace=False))

    # initial evaluation
    self.evaluate_population(generators_population.phenotypes(), discriminators_population.phenotypes(),
                             generators_population, discriminators_population,
                             best_generators, best_discriminators, initial=True)

    # store best individuals
    best_discriminators = self.get_bests(discriminators_population, best_discriminators)
    best_generators = self.get_bests(generators_population, best_generators)

    generation = 0
    for generation in tqdm(range(config.evolution.max_generations - 1)):
        self.stats.generate(self.input_shape, generators_population, discriminators_population, generation,
                            config.evolution.max_generations, self.train_loader, self.validation_loader)

        # select parents for reproduction
        g_parents = self.select(generators_population)
        d_parents = self.select(discriminators_population)

        # apply variation operators (only mutation for now)
        g_children = self.generate_children(g_parents, generation)

        # limit the number of layers in D's to the max layers among G's
        max_layers_g = max([len(gc.genome.genes) for gc in g_children])
        for s in d_parents:
            for dp in s:
                dp[0].genome.max_layers = max_layers_g
        d_children = self.generate_children(d_parents, generation)

        # evaluate the children population and the best individuals (when elitism is being used)
        logger.debug(f"[generation {generation}] evaluate population")
        self.evaluate_population(g_children, d_children, generators_population, discriminators_population,
                                 best_generators, best_discriminators)

        # store best of generation in coevolution memory
        best_discriminators = self.get_bests(discriminators_population, best_discriminators)
        best_generators = self.get_bests(generators_population, best_generators)

        # generate a new population based on the fitness of the children and elite individuals
        generators_population, discriminators_population = self.replace_population(
            generators_population, discriminators_population, g_children, d_children)

    # stats for last generation
    self.stats.generate(self.input_shape, generators_population, discriminators_population, generation + 1,
                        config.evolution.max_generations, self.train_loader, self.validation_loader)
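# The self.select(...) calls above pick parent groups for reproduction; the actual
# scheme is defined elsewhere in the project. As a point of reference only, a generic
# tournament-selection sketch is shown below. It is an assumption for illustration,
# not the project's select() implementation, and it assumes each individual exposes a
# numeric `fitness` attribute where lower is better (as with FID).
import random

def tournament_select(population, num_parents, tournament_size=2):
    """Generic sketch: for each parent slot, sample a few individuals at random
    and keep the fittest one (lowest fitness value)."""
    parents = []
    for _ in range(num_parents):
        candidates = random.sample(population, tournament_size)
        parents.append(min(candidates, key=lambda ind: ind.fitness))
    return parents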
ax[i, j].get_yaxis().set_visible(False)

path = path + 'Epoch_{0}_'.format(epoch) + str(i) + '.png'
for k in range(5 * 5):
    i = k // 5
    j = k % 5
    ax[i, j].cla()
    ax[i, j].imshow(fake[k, 0].cpu().data.numpy(), cmap='gray')
fig.savefig(path)
plt.close()
iters += 1

# Get FID score for the model:
from metrics import generative_score
from metrics.fid import fid_score
from util import tools

base_fid_statistics, inception_model = generative_score.initialize_fid(dataloader, sample_size=1000)
noise = torch.randn(1000, 100, 1, 1, device=device)
netG.eval()
generated_images = netG(noise).detach()
inception_model = tools.cuda(inception_model)
m1, s1 = fid_score.calculate_activation_statistics(
    generated_images.data.cpu().numpy(), inception_model,
    cuda=tools.is_cuda_available(), dims=2048)
inception_model.cpu()
m2, s2 = base_fid_statistics
ret = fid_score.calculate_frechet_distance(m1, s1, m2, s2)
netG.zero_grad()
print("FID score is:", ret)