Beispiel #1
0
def test_metaoptimize_genalg():
    """Meta-optimization should tune GenAlg's hyperparameters away from defaults."""
    optimizer = genalg.GenAlg(
        examplefunctions.ackley, 32,
        decode_func=examplefunctions.ackley_binary)
    optimizer._logging_func = (
        lambda a, b, c: optimize._print_fitnesses(a, b, c, frequency=100))
    prev_hyperparameters = optimizer._get_hyperparameters()

    # Baseline run without meta-optimization; remember how long it took
    optimizer.optimize()
    iterations_to_solution = optimizer.iteration

    # Meta-optimize the hyperparameters, then run the optimizer again
    optimizer.optimize_hyperparameters(
        smoothing=1, _meta_optimizer=genalg.GenAlg(None, None, 1, 1))
    optimizer.optimize()

    # The tuned hyperparameters must differ from the originals
    assert optimizer._get_hyperparameters() != prev_hyperparameters
    #assert optimizer.iteration < iterations_to_solution # Improvements are made
Beispiel #2
0
def test_genalg_problems():
    """GenAlg should be able to locate the solution of the Ackley benchmark."""
    optimizer = genalg.GenAlg(
        examplefunctions.ackley, 32,
        decode_func=examplefunctions.ackley_binary)
    optimizer._logging_func = (
        lambda a, b, c: optimize._print_fitnesses(a, b, c, frequency=100))

    optimizer.optimize()

    # The optimizer must report that it actually found the solution
    assert optimizer.solution_found
Beispiel #3
0
def genalg_optimizer(sentence_dataset, overall_sentiment):
    """Optimize using a genetic algorithm"""
    num_sentences = len(sentence_dataset)
    # Bits needed to encode one dataset index; one slot per key point
    index_bits = get_binary_index_size(num_sentences)

    def genalg_fitness(chromosome, binary_index_size, **kwargs):
        # Decode the binary chromosome into dataset indexes, then score them
        indexes = decode_chromosome(chromosome, binary_index_size)
        return get_fitness(indexes, **kwargs)

    # Build the GA that will search for the best key points
    return genalg.GenAlg(genalg_fitness,
                         index_bits * NUM_KEY_POINTS,
                         sentence_dataset=sentence_dataset,
                         overall_sentiment=overall_sentiment,
                         binary_index_size=index_bits)
Beispiel #4
0
def test_metaoptimize_gsa():
    """Meta-optimization should tune GSA's hyperparameters away from defaults."""
    optimizer = gsa.GSA(examplefunctions.ackley,
                        2, [-5.0] * 2, [5.0] * 2,
                        max_iterations=1000,
                        decode_func=examplefunctions.decode_real)
    optimizer._logging_func = (
        lambda a, b, c: optimize._print_fitnesses(a, b, c, frequency=100))
    prev_hyperparameters = optimizer._get_hyperparameters()

    # Baseline run without meta-optimization; remember how long it took
    optimizer.optimize()
    iterations_to_solution = optimizer.iteration

    # Meta-optimize the hyperparameters, then run the optimizer again
    optimizer.optimize_hyperparameters(
        smoothing=1,
        _meta_optimizer=genalg.GenAlg(None, None, 1, 1))
    optimizer.optimize()

    # The tuned hyperparameters must differ from the originals
    assert optimizer._get_hyperparameters() != prev_hyperparameters
Beispiel #5
0
        # will be included (avoiding the need to access protected members)
        optimizer._fitness_function = function['func']
        optimizer._additional_parameters['decode_func'] = function['decode']

        # Make a copy, or we'll only have one optimizer repeated in our list
        optimizers.append(copy.deepcopy(optimizer))

    # Get our benchmark stats
    all_stats = benchmark.compare(optimizers, 100)
    return benchmark.aggregate(all_stats)


# Create the genetic algorithm configurations to compare
# In reality, we would also want to optimize other hyperparameters
ga_onepoint = genalg.GenAlg(
    None, 32, crossover_function=gaoperators.one_point_crossover)
ga_uniform = genalg.GenAlg(
    None, 32, crossover_function=gaoperators.uniform_crossover)

# Run a benchmark for multiple problems, for robust testing
onepoint_stats = benchmark_multi(ga_onepoint)
uniform_stats = benchmark_multi(ga_uniform)

# Report the aggregated stats for each crossover operator
for label, stats in (('One Point', onepoint_stats),
                     ('Uniform', uniform_stats)):
    print()
    print(label)
    pprint.pprint(stats)
Beispiel #6
0
def test_genalg_sphere():
    """GenAlg should be able to solve the sphere benchmark function."""
    optimizer = genalg.GenAlg(
        examplefunctions.sphere, 32,
        decode_func=examplefunctions.ackley_binary)
    optimizer._logging_func = (
        lambda a, b, c: optimize._print_fitnesses(a, b, c, frequency=100))

    optimizer.optimize()

    # The optimizer must report that it actually found the solution
    assert optimizer.solution_found
Beispiel #7
0
def optimize_keypoints(sentence_dataset, overall_sentiment):
    """Given a dataset of sentences, return the N sentences that best describe the product."""
    num_sentences = len(sentence_dataset)
    # Bits needed to index the dataset: smallest b with len <= 2^b
    bits_per_index = int(math.ceil(math.log(num_sentences, 2)))
    # One index slot per key point
    chromosome_size = bits_per_index * NUM_KEY_POINTS

    def decode_chromosome(chromosome):
        """Turn a binary chromosome into indexes for the dataset."""
        # NOTE: this encoding may be problematic with large datasets
        # because there will be a large number of invalid indexes
        return [
            gahelpers.binary_to_int(
                chromosome[start:start + bits_per_index], 0)
            for start in range(0, chromosome_size, bits_per_index)
        ]

    def get_fitness(chromosome):
        """Determine the fitness of some potential key points."""
        indexes = decode_chromosome(chromosome)
        # Heavily penalize chromosomes with out-of-range or repeated indexes
        for position, index in enumerate(indexes):
            if index > num_sentences - 1:
                return 0.000001
            if index in indexes[position + 1:]:
                return 0.000001

        # Look up the candidate key points
        key_points = [sentence_dataset[index] for index in indexes]

        total_sentiment = sum(
            point['sentiment'] for point in key_points)
        sentiment_magnitude = sum(
            point['sentiment_magnitude'] for point in key_points)

        # Fitness rewards (a) closeness of the summed sentiment to the
        # overall sentiment and (b) the magnitude of the sentiments
        sentiment_closeness = 1.0 / (
            (overall_sentiment - total_sentiment) ** 2 + 1)
        return sentiment_closeness * 10 + sentiment_magnitude

    # Search for the best key points
    optimizer = genalg.GenAlg(get_fitness, chromosome_size)
    optimizer.logging = False  # disable output logging to avoid spamming the console, enable for debugging
    best_chromosome = optimizer.run_genalg()

    # Decode the winning chromosome back into sentences
    return [sentence_dataset[index]
            for index in decode_chromosome(best_chromosome)]