예제 #1
0
    def initialize_populations(self):
        """Create single-member generator and discriminator populations.

        Builds one fresh generator and one fresh discriminator via the
        network factory and wraps each in its own Population, seeded with
        the network's default fitness.

        :return: tuple ``(generator_population, discriminator_population)``.
        """
        generator = self.network_factory.create_generator()
        discriminator = self.network_factory.create_discriminator()

        generator_population = Population(
            individuals=[Individual(genome=generator,
                                    fitness=generator.default_fitness)],
            default_fitness=generator.default_fitness)

        discriminator_population = Population(
            individuals=[Individual(genome=discriminator,
                                    fitness=discriminator.default_fitness)],
            default_fitness=discriminator.default_fitness)

        return generator_population, discriminator_population
예제 #2
0
def generate_samples(args, cc):
    """Generate samples from a saved generator mixture.

    Reads the mixture definition from ``mixture.yml`` inside
    ``args.mixture_source``, loads every listed generator checkpoint into
    a Population, builds a weighted MixedGeneratorDataset from it, and
    writes ``args.sample_size`` batches of samples to ``args.output_dir``.

    :param args: parsed CLI arguments providing ``mixture_source``,
        ``output_dir`` and ``sample_size``.
    :param cc: configuration container used to instantiate the dataloader
        and network factory.
    """
    print("generating samples")
    batch_size = 100

    source_dir = args.mixture_source
    target_dir = args.output_dir
    n_batches = args.sample_size

    dataloader = cc.create_instance(cc.settings['dataloader']['dataset_name'])
    network_factory = cc.create_instance(cc.settings['network']['name'],
                                         dataloader.n_input_neurons)

    mixture_definition = read_settings(
        os.path.join(source_dir, 'mixture.yml'))

    population = Population(individuals=[], default_fitness=0)
    # Only the source names are needed here; the weights are consumed
    # later by MixedGeneratorDataset through mixture_definition.
    for source in mixture_definition:
        generator = network_factory.create_generator()
        generator.net.load_state_dict(
            torch.load(os.path.join(source_dir, source)))
        generator.net.eval()
        population.individuals.append(
            Individual(genome=generator, fitness=0, source=source))

    dataset = MixedGeneratorDataset(
        population, mixture_definition, n_batches * batch_size,
        cc.settings['trainer']['mixture_generator_samples_mode'])
    os.makedirs(target_dir, exist_ok=True)
    LipizzanerMaster().save_samples(dataset, target_dir, dataloader,
                                    n_batches, batch_size)
예제 #3
0
def calc_score(args, cc):
    """Score a single saved generator with the configured score calculator.

    Loads generator weights from ``args.generator_file`` (mapped to GPU if
    available, else CPU), draws 50000 samples from a one-generator mixture,
    saves them under ``<output_dir>/score``, and logs the resulting score.

    :param args: parsed CLI arguments providing ``generator_file``.
    :param cc: configuration container used to instantiate the dataloader
        and network factory.
    """
    score_calc = ScoreCalculatorFactory.create()
    cc.settings['general']['distribution']['client_id'] = 0
    dataloader = cc.create_instance(cc.settings['dataloader']['dataset_name'])
    network_factory = cc.create_instance(cc.settings['network']['name'],
                                         dataloader.n_input_neurons)

    map_device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    generator = network_factory.create_generator()
    state_dict = torch.load(args.generator_file, map_location=map_device)
    generator.net.load_state_dict(state_dict)
    generator.net.eval()

    population = Population(
        individuals=[Individual(genome=generator, fitness=0, source='local')],
        default_fitness=0)
    dataset = MixedGeneratorDataset(
        population, {'local': 1.0}, 50000,
        cc.settings['trainer']['mixture_generator_samples_mode'])

    output_dir = os.path.join(cc.output_dir, 'score')
    os.makedirs(output_dir, exist_ok=True)
    LipizzanerMaster().save_samples(dataset, output_dir, dataloader)
    inc = score_calc.calculate(dataset)
    _logger.info('Generator loaded from \'{}\' yielded a score of {}'.format(
        args.generator_file, inc))
예제 #4
0
 def _create_population(all_parameters, create_genome, population_type):
     """Decode a Population from serialized individual parameter records.

     Entries that are falsy or empty are skipped. The resulting population
     starts with a default fitness of negative infinity.

     :param all_parameters: iterable of dicts, each expected to carry
         ``'parameters'`` and ``'source'`` keys.
     :param create_genome: factory used by ``Individual.decode`` to build
         the genome.
     :param population_type: population type tag forwarded to Population.
     :return: the decoded Population.
     """
     decoded_individuals = []
     for record in all_parameters:
         if record and len(record) > 0:
             decoded_individuals.append(
                 Individual.decode(create_genome,
                                   record['parameters'],
                                   source=record['source']))
     return Population(decoded_individuals, float('-inf'), population_type)
예제 #5
0
 def _parse_individual(json, create_genome):
     """Reconstruct a non-local Individual from its JSON representation.

     :param json: dict with ``'parameters'``, ``'learning_rate'``,
         ``'optimizer_state'``, ``'source'`` and ``'id'`` keys.
     :param create_genome: factory used by ``Individual.decode`` to build
         the genome.
     :return: the decoded Individual (marked ``is_local=False``).
     """
     optimizer_state = StateEncoder.decode(json['optimizer_state'])
     return Individual.decode(create_genome,
                              json['parameters'],
                              is_local=False,
                              learning_rate=json['learning_rate'],
                              optimizer_state=optimizer_state,
                              source=json['source'],
                              id=json['id'])
예제 #6
0
    def initialize_populations(self):
        """Create generator and discriminator populations of equal size.

        Builds ``self._population_size`` generator/discriminator pairs via
        the network factory, then sets each population's default fitness to
        the fitness of its first individual.

        :return: tuple ``(generator_population, discriminator_population)``.
        """
        generators = Population(
            individuals=[], default_fitness=0, population_type=TYPE_GENERATOR)
        discriminators = Population(
            individuals=[],
            default_fitness=0,
            population_type=TYPE_DISCRIMINATOR)

        for _ in range(self._population_size):
            gen, dis = self.network_factory.create_both()
            generators.individuals.append(
                Individual(genome=gen, fitness=gen.default_fitness))
            discriminators.individuals.append(
                Individual(genome=dis, fitness=dis.default_fitness))

        generators.default_fitness = generators.individuals[0].fitness
        discriminators.default_fitness = discriminators.individuals[0].fitness

        return generators, discriminators
예제 #7
0
def calc_inception_score(args, cc):
    """Compute the inception score of a single saved generator.

    Loads generator weights from ``args.inception_file``, draws 50000
    samples from a one-generator mixture, saves them under
    ``<output_dir>/inception_score``, and logs the resulting score.

    :param args: parsed CLI arguments providing ``inception_file``.
    :param cc: configuration container used to instantiate the dataloader
        and network factory.
    """
    inception_cal = InceptionCalculator(cuda=True)
    dataloader = cc.create_instance(cc.settings['dataloader']['dataset_name'])
    network_factory = cc.create_instance(cc.settings['network']['name'],
                                         dataloader.n_input_neurons)

    generator = network_factory.create_generator()
    state_dict = torch.load(args.inception_file)
    generator.net.load_state_dict(state_dict)
    generator.net.eval()

    population = Population(
        individuals=[Individual(genome=generator, fitness=0, source='local')],
        default_fitness=0)
    dataset = MixedGeneratorDataset(population, {'local': 1.0}, 50000)

    output_dir = os.path.join(cc.output_dir, 'inception_score')
    os.makedirs(output_dir, exist_ok=True)
    LipizzanerMaster().save_samples(dataset, output_dir, dataloader)
    inc = inception_cal.calculate(dataset)
    _logger.info(
        'Generator loaded from \'{}\' yielded an inception score of {}'.format(
            args.inception_file, inc))
    def evaluate_ensemble(self,
                          individual,
                          network_factory,
                          mixture_generator_samples_mode='exact_proportion',
                          fitness_type='tvd'):
        """It evaluates the solution/individual (ensemble) given the fitness type. It generates samples and it evaluates
        the metric defined by fitness_type using Lipizzaner.
        :parameter individual: Solution to be evaluated
        :parameter network_factory: factory used to instantiate the generators.
        :parameter mixture_generator_samples_mode: sampling mode forwarded to MixedGeneratorDataset.
        :parameter fitness_type: It defines the type of metric to be evaluated
            ('tvd', 'fid' or 'tvd-fid').
        :return: One-element tuple with the fitness_type metric value got by the solution.
        :raises ValueError: if fitness_type is not one of 'tvd', 'fid', 'tvd-fid'.
        """
        population = Population(individuals=[], default_fitness=0)
        # Translate the encoded individual into mixture weights plus the
        # generator checkpoints (and their source tags) to load.
        tentative_weights, generators_paths, sources = self.ga.get_mixture_from_individual(
            individual)
        mixture_definition = dict(zip(sources, tentative_weights))
        for path, source in zip(generators_paths, sources):
            generator = network_factory.create_generator()
            generator.net.load_state_dict(torch.load(path, map_location='cpu'))
            generator.net.eval()
            population.individuals.append(
                Individual(genome=generator, fitness=0, source=source))
        dataset = MixedGeneratorDataset(population, mixture_definition, 50000,
                                        mixture_generator_samples_mode)
        fid, tvd = self.score_calc.calculate(dataset)

        if fitness_type == 'tvd':
            return tvd,
        elif fitness_type == 'fid':
            return fid,
        elif fitness_type == 'tvd-fid':
            return (tvd, fid),
        # Previously an unknown fitness_type silently returned None,
        # which would crash downstream fitness handling; fail loudly instead.
        raise ValueError(
            "Unknown fitness_type '{}'; expected 'tvd', 'fid' or 'tvd-fid'".format(
                fitness_type))
    def create_ensemble(self):
        """Greedily grow a generator ensemble that minimizes TVD.

        Repeatedly pulls candidate generator checkpoints via
        ``get_next_generator_path``, tentatively adds each to the ensemble,
        evaluates every tentative weight combination for the enlarged
        ensemble, and keeps the candidate only if it improves the best TVD
        seen so far. Stops when no candidates remain, when the maximum
        ensemble size is reached, or after too many candidates without
        improvement. Progress and the final result are reported via
        ``show_file_screen`` (and to ``self.output_file`` when set).
        """
        n_samples = 50000
        using_max_size = self.ensemble_max_size != 0

        population = Population(individuals=[], default_fitness=0)
        sources = []

        current_tvd = 1.0
        current_fid = 100
        current_mixture_definition = dict()
        generators_examined = 0
        # Bug fix: previously convergence_time was only assigned inside the
        # improvement branch, so the first non-improving candidate raised
        # UnboundLocalError on 'convergence_time += 1'.
        convergence_time = 0

        self.show_experiment_configuration()

        start_time = time.time()
        while True:
            next_generator_path, source = self.get_next_generator_path()
            if next_generator_path == '':
                text = 'Warning: \n'
                text += 'No more generators to be examined to be added to the ensemble. \n'
                text += 'Generators examined: {}\n'.format(generators_examined)
                self.show_file_screen(text)
                if self.output_file != '': self.show_file_screen(text, self.output_file)
                break
            generator = self.network_factory.create_generator()
            generator.net.load_state_dict(torch.load(next_generator_path, map_location='cpu'))
            generator.net.eval()

            # Tentatively add the candidate; it is popped again below if it
            # does not improve the ensemble.
            population.individuals.append(Individual(genome=generator, fitness=0, source=source))
            sources.append(source)
            ensemble_size = len(population.individuals)

            tvd_tentative = 1.0
            # Bug fix: seed fid_tentative so the progress line below is
            # well-defined even before any weight combination improves
            # tvd_tentative (it was previously read while unbound).
            fid_tentative = current_fid
            mixture_definition_i = dict()

            combinations_of_weights, size = self.get_weights_tentative(ensemble_size)
            if size == 0:
                break

            for tentative_mixture_definition in combinations_of_weights:
                mixture_definition = dict(zip(sources, tentative_mixture_definition))
                dataset = MixedGeneratorDataset(population,
                                                mixture_definition,
                                                n_samples,
                                                self.mixture_generator_samples_mode)
                fid, tvd = self.score_calc.calculate(dataset)
                # Track the best weighting found for the current candidate set.
                if tvd < tvd_tentative:
                    tvd_tentative = tvd
                    fid_tentative = fid
                    mixture_definition_i = mixture_definition
                generators_examined += 1
                text = 'Generators examined={} - Mixture: {} - FID={}, TVD={}, FIDi={}, TVDi={}, FIDbest={}, ' \
                       'TVDbest={}'.format(generators_examined, mixture_definition, fid, tvd, fid_tentative,
                                           tvd_tentative, current_fid, current_tvd)
                self.show_file_screen(text)
                if self.output_file != '': self.show_file_screen(text+ '\n', self.output_file)

            if tvd_tentative < current_tvd:
                # Candidate improved the ensemble: accept it and reset the
                # no-improvement counter.
                current_tvd = tvd_tentative
                current_fid = fid_tentative
                current_mixture_definition = mixture_definition_i
                convergence_time = 0
            else:
                # Candidate did not help: undo the tentative addition.
                sources.pop()
                population.individuals.pop()
                convergence_time += 1

            if using_max_size and len(sources) == self.ensemble_max_size:
                break
            else:
                if self.max_time_without_improvements!= 0 and convergence_time > self.max_time_without_improvements:
                    break

        # NOTE(review): these fragments lack separating newlines, so the
        # summary renders as one run-together line — confirm whether '\n'
        # separators were intended before changing the output format.
        text = 'Finishing execution....\n'
        text += 'FID={}'.format(current_fid)
        text += 'TVD={}'.format(current_tvd)
        text += 'Generators examined={}'.format(generators_examined)
        text += 'Ensemble: {}'.format(current_mixture_definition)
        text += 'Execution time={} \n'.format(time.time() - start_time)

        self.show_file_screen(text)
        if self.output_file != '': self.show_file_screen(text, self.output_file)

# dataset = 'mnist'
# precision=10
# mode='random'
# ensemble_max_size = 3
# greedy = GreedyEnsembleGenerator(dataset, ensemble_max_size, precision, generators_prefix='mnist-generator', generators_path='./mnist-generators/',
#                  mode=mode)
#
# greedy.create_ensemble()