Example No. 1
    def all_disc_gen_local(self):
        '''
            Get all generators and discriminators from all neighbours
        and create the Populations with them.
        Returns:
            gen_pop: Population of all generators from the neighbours
            disc_pop: Population of all discriminators from the neighbours
        '''
        # TODO: Check if encoding is necessary or if pickle is enough
        local_gen = self.local_generators
        local_disc = self.local_discriminators
        send_data = (local_gen, local_disc)
        data = self.node_client.local_all_gather(send_data)

        generators = []
        discriminators = []
        # lambda_separator = lambda d: data[0], data[1]
        for sender_wid, elem in enumerate(data):
            # TODO: Missing neighbours filter
            for gen_indiv in elem[0].individuals:
                gen_indiv.source = sender_wid
            generators += elem[0].individuals

            for disc_indiv in elem[1].individuals:
                disc_indiv.source = sender_wid
            discriminators += elem[1].individuals

        gen_pop = Population(individuals=generators,
                             default_fitness=local_gen.default_fitness,
                             population_type=TYPE_GENERATOR)
        disc_pop = Population(individuals=discriminators,
                              default_fitness=local_disc.default_fitness,
                              population_type=TYPE_DISCRIMINATOR)
        return gen_pop, disc_pop
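Most of the snippets on this page (the GAN-related ones) share a small Population/Individual interface; the differential-evolution examples further down use a different, unrelated Population class. The classes themselves are not shown here, so the following is only a minimal sketch of the interface the GAN-related snippets appear to assume: attribute and parameter names are taken from the calls in the examples, while the constant values and method bodies are assumptions, not the real implementation.

TYPE_GENERATOR = 0        # assumed values; the examples only use these as labels/indices
TYPE_DISCRIMINATOR = 1


class Individual:
    """Minimal stand-in for the Individual used throughout these examples."""

    def __init__(self, genome=None, fitness=None, source=None):
        self.genome = genome
        self.fitness = fitness
        self.source = source
        self.is_local = False

    def clone(self):
        # The real class presumably copies the genome as well.
        return Individual(genome=self.genome, fitness=self.fitness, source=self.source)


class Population:
    """Minimal stand-in for the Population used throughout these examples."""

    def __init__(self, individuals, default_fitness, population_type=None):
        self.individuals = list(individuals)
        self.default_fitness = default_fitness
        self.population_type = population_type

    def sort_population(self):
        # The examples treat the lowest fitness as best, so sort ascending in place.
        self.individuals.sort(key=lambda ind: ind.fitness)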
Example No. 2
    def __init__(self, num_iterations=10, CR=0.4, F=0.48, dim=2, population_size=10, print_status=False, visualize=False, func=None):
        random.seed()
        self.print_status = print_status
        self.visualize = visualize
        self.num_iterations = num_iterations
        self.iteration = 0
        self.CR = CR
        self.F = F
        self.population_size = population_size
        self.func = Function(func=func)
        self.population = Population(dim=dim, num_points=self.population_size, objective=self.func)
Example No. 3
    def initialize_populations(self):
        gen = self.network_factory.create_generator()
        dis = self.network_factory.create_discriminator()

        population_gen = Population(individuals=[Individual(genome=gen, fitness=gen.default_fitness)],
                                    default_fitness=gen.default_fitness)

        population_dis = Population(individuals=[Individual(genome=dis, fitness=dis.default_fitness)],
                                    default_fitness=dis.default_fitness)

        return population_gen, population_dis
Example No. 4
    def all_generators(self):
        neighbour_individuals = self.node_client.get_all_generators(self.neighbours)
        local_population = self.local_generators

        return Population(individuals=neighbour_individuals + local_population.individuals,
                          default_fitness=local_population.default_fitness,
                          population_type=TYPE_GENERATOR)
Example No. 5
    def all_discriminators(self):
        neighbour_individuals = self.node_client.get_all_discriminators(self.neighbours)
        local_population = self.local_discriminators

        return Population(individuals=neighbour_individuals + local_population.individuals,
                          default_fitness=local_population.default_fitness,
                          population_type=TYPE_DISCRIMINATOR)
Example No. 6
def calc_score(args, cc):
    score_calc = ScoreCalculatorFactory.create()
    cc.settings['general']['distribution']['client_id'] = 0
    dataloader = cc.create_instance(cc.settings['dataloader']['dataset_name'])
    network_factory = cc.create_instance(cc.settings['network']['name'],
                                         dataloader.n_input_neurons)

    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    generator = network_factory.create_generator()
    generator.net.load_state_dict(
        torch.load(args.generator_file, map_location=device))
    generator.net.eval()
    individual = Individual(genome=generator, fitness=0, source='local')

    dataset = MixedGeneratorDataset(
        Population(individuals=[individual],
                   default_fitness=0), {'local': 1.0}, 50000,
        cc.settings['trainer']['mixture_generator_samples_mode'])

    output_dir = os.path.join(cc.output_dir, 'score')
    os.makedirs(output_dir, exist_ok=True)
    LipizzanerMaster().save_samples(dataset, output_dir, dataloader)
    inc = score_calc.calculate(dataset)
    _logger.info('Generator loaded from \'{}\' yielded a score of {}'.format(
        args.generator_file, inc))
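calc_score only relies on args having a generator_file attribute that points at a saved generator state_dict. A hypothetical argparse setup for it (the flag name below is invented for illustration, not taken from the project's CLI):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--generator-file', dest='generator_file',
                    help='path to a saved generator state_dict')
args = parser.parse_args()
# calc_score(args, cc) can then load the generator from args.generator_file as shown above.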
Example No. 7
def generate_samples(args, cc):
    print("generating samples")
    batch_size = 100

    mixture_source = args.mixture_source
    output_dir = args.output_dir
    sample_size = args.sample_size

    dataloader = cc.create_instance(cc.settings['dataloader']['dataset_name'])
    network_factory = cc.create_instance(cc.settings['network']['name'],
                                         dataloader.n_input_neurons)

    population = Population(individuals=[], default_fitness=0)
    mixture_definition = read_settings(
        os.path.join(mixture_source, 'mixture.yml'))
    for source, weight in mixture_definition.items():
        path = os.path.join(mixture_source, source)
        generator = network_factory.create_generator()
        generator.net.load_state_dict(torch.load(path))
        generator.net.eval()
        population.individuals.append(
            Individual(genome=generator, fitness=0, source=source))

    dataset = MixedGeneratorDataset(
        population, mixture_definition, sample_size * batch_size,
        cc.settings['trainer']['mixture_generator_samples_mode'])
    os.makedirs(output_dir, exist_ok=True)
    LipizzanerMaster().save_samples(dataset, output_dir, dataloader,
                                    sample_size, batch_size)
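The mixture.yml file read here is not shown on this page. From the way it is used, read_settings() presumably returns a plain mapping from generator checkpoint file names (relative to mixture_source) to their mixture weights. A purely hypothetical example of that mapping (file names invented):

# Hypothetical mixture.yml contents:
#
#   generator-127.0.0.1-5000.pkl: 0.6
#   generator-127.0.0.1-5001.pkl: 0.4
#
# which read_settings() would turn into something like:
mixture_definition = {
    'generator-127.0.0.1-5000.pkl': 0.6,
    'generator-127.0.0.1-5001.pkl': 0.4,
}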
Example No. 8
    def best_generators(self):
        best_neighbour_individuals = self.node_client.get_best_generators(self.neighbours)
        local_population = self.local_generators
        best_local_individual = sorted(local_population.individuals, key=lambda x: x.fitness)[0]

        return Population(individuals=best_neighbour_individuals + [best_local_individual],
                          default_fitness=local_population.default_fitness,
                          population_type=TYPE_GENERATOR)
Example No. 9
    def _create_population(all_parameters, create_genome, population_type):
        individuals = [
            Individual.decode(create_genome,
                              parameters['parameters'],
                              source=parameters['source'])
            for parameters in all_parameters
            if parameters and len(parameters) > 0
        ]
        return Population(individuals, float('-inf'), population_type)
    def all_discriminators(self, sampling_size=None):
        neighbour_individuals = self.node_client.get_all_discriminators(
            self.neighbours)
        local_population = self.local_discriminators
        individuals = neighbour_individuals + local_population.individuals
        if sampling_size is not None:
            individuals = sample(individuals, sampling_size)
        return Population(individuals=individuals,
                          default_fitness=local_population.default_fitness,
                          population_type=TYPE_DISCRIMINATOR)
Example No. 11
    def best_generators_local(self):
        local_population = self.local_generators
        best_local_individual = sorted(local_population.individuals,
                                       key=lambda x: x.fitness)[0]

        all_best = self.node_client.local_all_gather(best_local_individual)
        for sender_wid, indiv in enumerate(all_best):
            indiv.source = sender_wid

        return Population(individuals=all_best,
                          default_fitness=local_population.default_fitness,
                          population_type=TYPE_GENERATOR)
Example No. 12
    def tournament_selection(self, population, population_type):
        assert 0 < self._tournament_size <= len(population.individuals), \
            "Invalid tournament size: {}".format(self._tournament_size)

        competition_population = Population(
            individuals=[], default_fitness=population.default_fitness)
        new_population = Population(individuals=[],
                                    default_fitness=population.default_fitness,
                                    population_type=population_type)

        # Iterate until there are enough tournament winners selected
        while len(new_population.individuals) < self._population_size:
            # Randomly select tournament size individual solutions
            # from the population.
            competitors = random.sample(population.individuals,
                                        self._tournament_size)
            competition_population.individuals = competitors

            # Rank the selected solutions (sort_population presumably sorts the
            # shared `competitors` list in place)
            competition_population.sort_population()

            # Copy the best solution: after sorting, competitors[0] is the tournament winner
            winner = competitors[0].clone()
            winner.is_local = True
            winner.fitness = competition_population.default_fitness

            # Append the best solution to the winners
            new_population.individuals.append(winner)

        return new_population
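For readers unfamiliar with the operator, here is a minimal, self-contained sketch of the same tournament-selection idea over a plain list of individuals; it assumes, as the sorting in the other examples suggests, that a lower fitness value is better (the names below are hypothetical and not part of the code above):

import random


def tournament_select(individuals, tournament_size, n_winners):
    """Pick n_winners individuals, each one the best of a random tournament."""
    assert 0 < tournament_size <= len(individuals), "Invalid tournament size"
    winners = []
    while len(winners) < n_winners:
        competitors = random.sample(individuals, tournament_size)
        # Lower fitness wins, matching the ascending sort used elsewhere on this page.
        winners.append(min(competitors, key=lambda ind: ind.fitness))
    return winners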
Example No. 13
def calc_inception_score(args, cc):
    inception_cal = InceptionCalculator(cuda=True)
    dataloader = cc.create_instance(cc.settings['dataloader']['dataset_name'])
    network_factory = cc.create_instance(cc.settings['network']['name'],
                                         dataloader.n_input_neurons)

    generator = network_factory.create_generator()
    generator.net.load_state_dict(torch.load(args.inception_file))
    generator.net.eval()
    individual = Individual(genome=generator, fitness=0, source='local')

    dataset = MixedGeneratorDataset(
        Population(individuals=[individual], default_fitness=0),
        {'local': 1.0}, 50000)

    output_dir = os.path.join(cc.output_dir, 'inception_score')
    os.makedirs(output_dir, exist_ok=True)
    LipizzanerMaster().save_samples(dataset, output_dir, dataloader)
    inc = inception_cal.calculate(dataset)
    _logger.info(
        'Generator loaded from \'{}\' yielded an inception score of {}'.format(
            args.inception_file, inc))
Example No. 14
    def evaluate_ensemble(self,
                          individual,
                          network_factory,
                          mixture_generator_samples_mode='exact_proportion',
                          fitness_type='tvd'):
        """It evaluates the solution/individual (ensemble) given the fitness type. It generates samples and it evaluates
        the metric defined by fitness_type using Lipizzaner.
        :parameter individual: Solutionto be evaluated
        :parameter network_factory:
        :parameter mixture_generator_samples_mode:
        :parameter fitness_type: It defines the type of metric to be evaluated.
        :return: The fitness_type metric value got by the solution.
        """
        population = Population(individuals=[], default_fitness=0)
        # weight_and_generator_indices = [math.modf(gen) for gen in individual]
        # generators_paths, sources = self.ga.get_generators_for_ensemble(weight_and_generator_indices)
        # tentative_weights = [weight for weight, generator_index in weight_and_generator_indices]
        tentative_weights, generators_paths, sources = self.ga.get_mixture_from_individual(
            individual)
        mixture_definition = dict(zip(sources, tentative_weights))
        for path, source in zip(generators_paths, sources):
            generator = network_factory.create_generator()
            generator.net.load_state_dict(torch.load(path, map_location='cpu'))
            generator.net.eval()
            population.individuals.append(
                Individual(genome=generator, fitness=0, source=source))
        dataset = MixedGeneratorDataset(population, mixture_definition, 50000,
                                        mixture_generator_samples_mode)
        fid, tvd = self.score_calc.calculate(dataset)

        # The trailing commas below return one-element tuples (fitness values as tuples).
        if fitness_type == 'tvd':
            return tvd,
        elif fitness_type == 'fid':
            return fid,
        elif fitness_type == 'tvd-fid':
            return (tvd, fid),
Example No. 15
    def initialize_populations(self):
        populations = [None] * 2
        populations[TYPE_GENERATOR] = Population(
            individuals=[], default_fitness=0, population_type=TYPE_GENERATOR)
        populations[TYPE_DISCRIMINATOR] = Population(
            individuals=[],
            default_fitness=0,
            population_type=TYPE_DISCRIMINATOR)

        for i in range(self._population_size):
            gen, dis = self.network_factory.create_both()
            populations[TYPE_GENERATOR].individuals.append(
                Individual(genome=gen, fitness=gen.default_fitness))
            populations[TYPE_DISCRIMINATOR].individuals.append(
                Individual(genome=dis, fitness=dis.default_fitness))

        populations[TYPE_GENERATOR].default_fitness = populations[
            TYPE_GENERATOR].individuals[0].fitness
        populations[TYPE_DISCRIMINATOR].default_fitness = populations[
            TYPE_DISCRIMINATOR].individuals[0].fitness

        return populations[TYPE_GENERATOR], populations[TYPE_DISCRIMINATOR]
Example No. 16
class DifferentialEvolution(object):
    def __init__(self, num_iterations=10, CR=0.4, F=0.48, dim=2, population_size=10, print_status=False, visualize=False, func=None):
        random.seed()
        self.print_status = print_status
        self.visualize = visualize
        self.num_iterations = num_iterations
        self.iteration = 0
        self.CR = CR
        self.F = F
        self.population_size = population_size
        self.func = Function(func=func)
        self.population = Population(dim=dim, num_points=self.population_size, objective=self.func)

    def iterate(self):
        for ix in range(self.population.num_points):
            x = self.population.points[ix]
            [a, b, c] = random.sample(self.population.points, 3)
            while x == a or x == b or x == c:
                [a, b, c] = random.sample(self.population.points, 3)

            R = random.random() * x.dim
            y = copy.deepcopy(x)

            for iy in range(x.dim):
                ri = random.random()

                if ri < self.CR or iy == R:
                    y.coords[iy] = a.coords[iy] + self.F * (b.coords[iy] - c.coords[iy])

            y.evaluate_point()
            if y.z < x.z:
                self.population.points[ix] = y
        self.iteration += 1

    def simulate(self):
        all_vals = []
        avg_vals = []
        pnt = get_best_point(self.population.points)
        all_vals.append(pnt.z)
        avg_vals.append(self.population.get_average_objective())
        print("Initial best value: " + str(pnt.z))
        while self.iteration < self.num_iterations:
            if self.print_status and self.iteration % 50 == 0:
                pnt = get_best_point(self.population.points)
                print(pnt.z, self.population.get_average_objective())
            self.iterate()
            all_vals.append(get_best_point(self.population.points).z)
            avg_vals.append(self.population.get_average_objective())
            if self.visualize and self.iteration % 2 == 0:
                self.population.get_visualization()
        # sns.figure(0)
        plt.plot(all_vals, 'r', label='Best')
        plt.plot(avg_vals, 'g', label='Average')
        plt.legend()
        plt.xlabel('Iterations')
        plt.ylabel('Objective Function Value')
        plt.title(self.func.func_name + ', ' + str(self.population.dim) + '-D')
        plt.show()
        pnt = get_best_point(self.population.points)
        print("Final best value: " + str(pnt.z))
        return pnt.z
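A hypothetical way to drive this optimiser end to end. The Function and Population classes used by DifferentialEvolution are not shown on this page, so the assumed objective signature (one argument: the list of coordinates) is a guess:

def sphere(coords):
    # Simple test objective: sum of squares, minimum 0 at the origin.
    return sum(c ** 2 for c in coords)


de = DifferentialEvolution(num_iterations=200, CR=0.4, F=0.48, dim=2,
                           population_size=30, print_status=True, func=sphere)
best_value = de.simulate()
print('Best objective value found:', best_value)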
Example No. 17
    def create_ensemble(self):
        n_samples = 50000
        using_max_size = self.ensemble_max_size != 0

        population = Population(individuals=[], default_fitness=0)
        sources = []

        current_tvd = 1.0
        current_fid = 100
        current_mixture_definition = dict()
        generators_examined = 0
        convergence_time = 0  # iterations since the last ensemble improvement

        self.show_experiment_configuration()

        start_time = time.time()
        while True:
            next_generator_path, source = self.get_next_generator_path()
            if next_generator_path == '':
                text = 'Warning: \n'
                text += 'No more generators to be examined to be added to the ensemble. \n'
                text += 'Generators examined: {}\n'.format(generators_examined)
                self.show_file_screen(text)
                if self.output_file != '': self.show_file_screen(text, self.output_file)
                break
            generator = self.network_factory.create_generator()
            generator.net.load_state_dict(torch.load(next_generator_path, map_location='cpu'))
            generator.net.eval()

            population.individuals.append(Individual(genome=generator, fitness=0, source=source))
            sources.append(source)
            ensemble_size = len(population.individuals)

            tvd_tentative = 1.0
            fid_tentative = current_fid  # keep defined for the log line below
            mixture_definition_i = dict()

            combinations_of_weights, size = self.get_weights_tentative(ensemble_size)
            if size == 0:
                break

            for tentative_mixture_definition in combinations_of_weights:
                mixture_definition = dict(zip(sources, tentative_mixture_definition))
                dataset = MixedGeneratorDataset(population,
                                                mixture_definition,
                                                n_samples,
                                                self.mixture_generator_samples_mode)
                fid, tvd = self.score_calc.calculate(dataset)
                if tvd < tvd_tentative:
                    tvd_tentative = tvd
                    fid_tentative = fid
                    mixture_definition_i = mixture_definition
                generators_examined += 1
                text = 'Generators examined={} - Mixture: {} - FID={}, TVD={}, FIDi={}, TVDi={}, FIDbest={}, ' \
                       'TVDbest={}'.format(generators_examined, mixture_definition, fid, tvd, fid_tentative,
                                           tvd_tentative, current_fid, current_tvd)
                self.show_file_screen(text)
                if self.output_file != '': self.show_file_screen(text + '\n', self.output_file)

            if tvd_tentative < current_tvd:
                current_tvd = tvd_tentative
                current_fid = fid_tentative
                current_mixture_definition = mixture_definition_i
                convergence_time = 0
            else:
                sources.pop()
                population.individuals.pop()
                convergence_time += 1

            if using_max_size and len(sources) == self.ensemble_max_size:
                break
            else:
                if self.max_time_without_improvements != 0 and convergence_time > self.max_time_without_improvements:
                    break

        text = 'Finishing execution....\n'
        text += 'FID={}\n'.format(current_fid)
        text += 'TVD={}\n'.format(current_tvd)
        text += 'Generators examined={}\n'.format(generators_examined)
        text += 'Ensemble: {}\n'.format(current_mixture_definition)
        text += 'Execution time={}\n'.format(time.time() - start_time)

        self.show_file_screen(text)
        if self.output_file != '': self.show_file_screen(text, self.output_file)

# dataset = 'mnist'
# precision=10
# mode='random'
# ensemble_max_size = 3
# greedy = GreedyEnsembleGenerator(dataset, ensemble_max_size, precision, generators_prefix='mnist-generator', generators_path='./mnist-generators/',
#                  mode=mode)
#
# greedy.create_ensemble()