Exemplo n.º 1
0
def run_mimic(t, samples, keep, m):
    """Run one MIMIC trial on CountOnes and log a fitness curve to CSV.

    Args:
        t: zero-based trial index (only used to build the output filename).
        samples: number of samples MIMIC draws each iteration.
        keep: number of best samples retained each iteration.
        m: prior/threshold passed to DiscreteDependencyTree.

    Relies on module globals: outfile, base, ranges, maxIters.
    Every 10 iterations writes a row: iteration, fitness, cumulative time,
    function evaluations.
    """
    fname = outfile.format('MIMIC{}_{}_{}'.format(samples, keep, m),
                           str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    # MIMIC only needs the probabilistic-optimization problem; the
    # neighbor/mutation/crossover/GA-problem objects previously built here
    # were unused dead code and have been removed.
    df = DiscreteDependencyTree(m, ranges)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    mimic = MIMIC(samples, keep, pop)
    fit = FixedIterationTrainer(mimic, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Unified on time.clock() (the original mixed bare clock() with
        # time.clock() for the same timing pair).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(mimic.getOptimal())
        ef.fevals -= 1  # don't count the bookkeeping value() call above
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Exemplo n.º 2
0
def run_sa(t, CE):
    """Run one Simulated Annealing trial on CountOnes and log it to CSV.

    Args:
        t: zero-based trial index (only used to build the output filename).
        CE: cooling exponent passed to SimulatedAnnealing (start temp 1E10).

    Relies on module globals: outfile, base, ranges, maxIters.
    Every 10 iterations writes a row: iteration, fitness, cumulative time,
    function evaluations.
    """
    fname = outfile.format('SA{}'.format(CE), str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    sa = SimulatedAnnealing(1E10, CE, hcp)
    fit = FixedIterationTrainer(sa, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Unified on time.clock() (the original mixed bare clock() with
        # time.clock() for the same timing pair).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(sa.getOptimal())
        ef.fevals -= 1  # don't count the bookkeeping value() call above
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Exemplo n.º 3
0
def run_rhc(t):
    """Run one Randomized Hill Climbing trial on CountOnes, logging to CSV.

    Args:
        t: zero-based trial index (only used to build the output filename).

    Relies on module globals: outfile, base, ranges, maxIters.
    Every 10 iterations writes a row: iteration, fitness, cumulative time,
    function evaluations.
    """
    fname = outfile.format('RHC', str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Unified on time.clock() (the original mixed bare clock() with
        # time.clock() for the same timing pair).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(rhc.getOptimal())
        ef.fevals -= 1  # don't count the bookkeeping value() call above
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)

    return
Exemplo n.º 4
0
def run_ga(t, pop, mate, mutate):
    """Run one Standard Genetic Algorithm trial on CountOnes, logging to CSV.

    Args:
        t: zero-based trial index (only used to build the output filename).
        pop: GA population size.
        mate: number of individuals mated per generation.
        mutate: number of individuals mutated per generation.

    Relies on module globals: outfile, base, ranges, maxIters.
    Every 10 iterations writes a row: iteration, fitness, cumulative time,
    function evaluations.
    """
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    # The DiscreteChangeOneNeighbor previously built here was unused (GA
    # needs only mutation + crossover) and has been removed.
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Unified on time.clock() (the original mixed bare clock() with
        # time.clock() for the same timing pair).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
        ef.fevals -= 1  # don't count the bookkeeping value() call above
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Exemplo n.º 5
0
# Fragment: builds the CountOnes optimization problems from globals defined
# outside this chunk (ef, odd, nf, mf, ranges, runs, N), then averages RHC
# results/feval-calls/time over `runs` independent runs.
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

iters = 5000

t0 = time.time()
calls = []
results = []
for _ in range(runs):
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, iters)
    fitness = fit.train()
    results.append(ef.value(rhc.getOptimal()))
    calls.append(ef.getTotalCalls())
    ef.clearCount()  # reset the shared feval counter between runs
print "RHC, average results , " + str(
    sum(results) / float(runs)) + ", countones-%d.txt" % N
print "RHC, average feval calls , " + str(
    sum(calls) / float(runs)) + ", countones-%d.txt" % N
t1 = time.time() - t0
print "RHC, average time , " + str(float(t1) / runs) + ", countones-%d.txt" % N

t0 = time.time()
calls = []
results = []
for _ in range(runs):
    sa = SimulatedAnnealing(1e10, .95, hcp)
    fit = FixedIterationTrainer(sa, iters)
# NOTE(review): fragment is cut off here — the SA loop body continues
# beyond this chunk.
Exemplo n.º 6
0
# Fragment: problem setup from globals (ef, odd, nf, ranges), then timing
# sweeps of RHC and SA over growing iteration budgets (250, 500, ..., 5000).
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

times = ""
print "RHC:"
for x in range(20):
    start = time.time()
    iterations = (x + 1) * 250
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, iterations)
    fit.train()
    print(str(ef.value(rhc.getOptimal())))
    end = time.time()
    times += "\n%0.03f" % (end - start)  # accumulate per-budget wall time
print(times)

times = ""
print "SA:"
for x in range(20):
    start = time.time()
    iterations = (x + 1) * 250
    sa = SimulatedAnnealing(100, .95, hcp)
    fit = FixedIterationTrainer(sa, iterations)
    fit.train()
    print(str(ef.value(sa.getOptimal())))
    end = time.time()
    times += "\n%0.03f" % (end - start)
# NOTE(review): fragment ends here — the SA `times` string is never printed
# within this chunk.
Exemplo n.º 7
0
   runs : number of runs to average over
"""

# N-position bit vector; each position ranges over {0, 1}.
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# Average MIMIC fitness / feval-calls / wall time over `runs` runs.
# `samples` and `tokeep` come from outside this chunk.
t0 = time.time()
calls = []
results = []
for _ in range(runs):
    mimic = MIMIC(samples, tokeep, pop)
    fit = FixedIterationTrainer(mimic, 100)
    fitness = fit.train()
    results.append(ef.value(mimic.getOptimal()))
    calls.append(ef.getTotalCalls())
    ef.clearCount()  # reset the shared feval counter between runs
print "MIMIC, average results, " + str(sum(results)/float(runs)) + ", countones_MIMIC-%d-%d-%d.txt" % (N, samples, tokeep)
print "MIMIC, average feval calls , " + str(sum(calls)/float(runs)) + ", countones_MIMIC-%d-%d-%d.txt" % (N, samples, tokeep)
t1 = time.time() - t0
print "MIMIC, average time , " + str(t1/float(runs)) + ", countones_MIMIC-%d-%d-%d.txt" % (N, samples, tokeep)
Exemplo n.º 8
0
# Fragment: trains all four optimizers one step per loop iteration and logs a
# per-iteration fitness curve for each to its own CSV. Depends on globals
# from outside this chunk: ef, odd, mf, cf, df, hcp, ITERATIONS.
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
sa = SimulatedAnnealing(100, .95, hcp)
ga = StandardGeneticAlgorithm(20, 20, 0, gap)
mimic = MIMIC(50, 10, pop)

rhc_f = open('out/op/countones/rhc.csv', 'w')
sa_f = open('out/op/countones/sa.csv', 'w')
ga_f = open('out/op/countones/ga.csv', 'w')
mimic_f = open('out/op/countones/mimic.csv', 'w')

for i in range(ITERATIONS):
    # Each .train() call advances the optimizer by one iteration.
    rhc.train()
    rhc_fitness = ef.value(rhc.getOptimal())
    rhc_f.write('{},{}\n'.format(i, rhc_fitness))

    sa.train()
    sa_fitness = ef.value(sa.getOptimal())
    sa_f.write('{},{}\n'.format(i, sa_fitness))

    ga.train()
    ga_fitness = ef.value(ga.getOptimal())
    ga_f.write('{},{}\n'.format(i, ga_fitness))

    mimic.train()
    mimic_fitness = ef.value(mimic.getOptimal())
    mimic_f.write('{},{}\n'.format(i, mimic_fitness))

rhc_f.close()
# NOTE(review): sa_f, ga_f and mimic_f are never closed in this fragment —
# confirm they are closed later or data may be lost on exit.
Exemplo n.º 9
0
   N : number in the test vector
   runs : number of runs to average over
"""

# N-position bit vector; each position ranges over {0, 1}.
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# Average GA fitness / feval-calls / wall time over `runs` runs.
# ga_pop, ga_keep, ga_mut, co_type come from outside this chunk.
t0 = time.time()
calls = []
results = []
for _ in range(runs):
    ga = StandardGeneticAlgorithm(ga_pop, ga_keep, ga_mut, gap)
    fit = FixedIterationTrainer(ga, 150)
    fitness = fit.train()
    results.append(ef.value(ga.getOptimal()))
    calls.append(ef.getTotalCalls())
    ef.clearCount()  # reset the shared feval counter between runs
print "GA, average results , " + str(sum(results)/float(runs)) + ", countones_ga_%d-%d-%d-%d-%d.txt" % (N, ga_pop,co_type,ga_keep,ga_mut)
print "GA, average feval calls , " + str(sum(calls)/float(runs)) + ", countones_ga_%d-%d-%d-%d-%d.txt" % (N, ga_pop,co_type,ga_keep,ga_mut)
t1 = time.time() - t0
print "GA, average time , " + str(t1/float(runs)) + ", countones_ga_%d-%d-%d-%d-%d.txt" % (N, ga_pop,co_type,ga_keep,ga_mut)
Exemplo n.º 10
0
# Fragment: one-shot comparison of RHC/SA/GA/MIMIC on CountOnes.
# `fill` (and the imports) come from outside this chunk.
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

# Start temperature 100, cooling factor .95 per iteration.
sa = SimulatedAnnealing(100, .95, hcp)
fit = FixedIterationTrainer(sa, 200)
fit.train()
print "SA: " + str(ef.value(sa.getOptimal()))

# Population 20, mate 20, mutate 0.
ga = StandardGeneticAlgorithm(20, 20, 0, gap)
fit = FixedIterationTrainer(ga, 300)
fit.train()
print "GA: " + str(ef.value(ga.getOptimal()))

# 50 samples per iteration, keep the best 10.
mimic = MIMIC(50, 10, pop)
fit = FixedIterationTrainer(mimic, 100)
fit.train()
print "MIMIC: " + str(ef.value(mimic.getOptimal()))
Exemplo n.º 11
0
# Fragment: timed single runs of RHC and SA on CountOnes.
# `ranges` comes from outside this chunk.
ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

start = time.time()
rhc = RandomizedHillClimbing(hcp)
rhcIterations = 100
fit = FixedIterationTrainer(rhc, rhcIterations)
fit.train()
print("\nRHC: " + str(ef.value(rhc.getOptimal())))
print("RHC Iterations: " + str(rhcIterations))
end = time.time()
traintime = end - start
print("RHC results time: %0.03f seconds" % (traintime,))

start = time.time()
# Start temperature 100, cooling factor .95 per iteration.
sa = SimulatedAnnealing(100, .95, hcp)
saIterations = 200
fit = FixedIterationTrainer(sa, saIterations)
fit.train()
print("\nSA: " + str(ef.value(sa.getOptimal())))
print("SA Iterations: " + str(saIterations))
end = time.time()
traintime = end - start
print("SA results time: %0.03f seconds" % (traintime,))
Exemplo n.º 12
0
# Fragment: one-shot comparison of RHC/SA/GA/MIMIC on CountOnes (duplicate
# of an earlier chunk). `fill` comes from outside this chunk.
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

# Start temperature 100, cooling factor .95 per iteration.
sa = SimulatedAnnealing(100, .95, hcp)
fit = FixedIterationTrainer(sa, 200)
fit.train()
print "SA: " + str(ef.value(sa.getOptimal()))

# Population 20, mate 20, mutate 0.
ga = StandardGeneticAlgorithm(20, 20, 0, gap)
fit = FixedIterationTrainer(ga, 300)
fit.train()
print "GA: " + str(ef.value(ga.getOptimal()))

# 50 samples per iteration, keep the best 10.
mimic = MIMIC(50, 10, pop)
fit = FixedIterationTrainer(mimic, 100)
fit.train()
print "MIMIC: " + str(ef.value(mimic.getOptimal()))
Exemplo n.º 13
0
# Fragment: sweeps iteration budgets for RHC and SA, printing fitness and
# duration per budget. Depends on globals from outside this chunk: ef, odd,
# nf, mf, ranges.
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# NOTE(review): the same RHC instance is reused across budgets, so training
# is cumulative rather than independent per budget — confirm intended.
rhc = RandomizedHillClimbing(hcp)
print "Random Hill Climb"
iters_list = [10, 25, 50, 100, 200, 300, 400, 500]
for iters in iters_list:
    fit = FixedIterationTrainer(rhc, iters)
    start = time.time()
    fit.train()
    duration = time.time() - start
    # NOTE(review): the comma before ", Duration" makes Python 2's print
    # insert an extra space — output reads "... , Duration: ...".
    print "Iterations: " + str(iters) + ", Fitness: " + str(
        ef.value(rhc.getOptimal())), ", Duration: " + str(duration)

print "Simulated Annealing"
for iters in iters_list:
    # Initial temperature of 100, with a cooling factor of .95 every iteration.
    # Thus the temperature will be 100 for the first iteration, 95 for the second, 90.25, 87.65...
    sa = SimulatedAnnealing(100, .95, hcp)
    fit = FixedIterationTrainer(sa, iters)
    start = time.time()
    fit.train()
    duration = time.time() - start
    print "Iterations: " + str(iters) + ", Fitness: " + str(
        ef.value(sa.getOptimal())), ", Duration: " + str(duration)

print "Simulated Annealing - Temperatures"
temps = [100, 90, 75, 50, 25, 10]
Exemplo n.º 14
0
    # Fragment: interior of a function defined outside this chunk. Builds the
    # problems (ef, ranges, N, OUTFILE_BASE come from the enclosing scope),
    # then appends (N, training_time, fitness) rows to per-algorithm CSVs.
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, N)  # iteration budget scales with N
    start = time.time()
    fit.train()
    end = time.time()
    training_time = end - start
    print "RHC: " + str(ef.value(rhc.getOptimal()))
    OUTFILE = "%s%s.csv" % (OUTFILE_BASE, "RHC")
    with open(OUTFILE, 'a+') as f:
        f.write("%d,%f,%f\n" % (N, training_time, ef.value(rhc.getOptimal())))

    # Very high start temperature (1E11) with cooling factor .95.
    sa = SimulatedAnnealing(1E11, .95, hcp)
    fit = FixedIterationTrainer(sa, N)
    start = time.time()
    fit.train()
    end = time.time()
    training_time = end - start
    print "SA: " + str(ef.value(sa.getOptimal()))
    OUTFILE = "%s%s.csv" % (OUTFILE_BASE, "SA")
    with open(OUTFILE, 'a+') as f:
        f.write("%d,%f,%f\n" % (N, training_time, ef.value(sa.getOptimal())))
Exemplo n.º 15
0
def run_count_ones_experiments():
    """Sweep RHC/SA/GA/MIMIC on an 80-bit CountOnes problem, logging CSVs.

    Writes one CSV per algorithm under ./output; each row records the
    iteration budget, the hyperparameters swept, the resulting fitness and
    the wall time. A fresh optimizer is built per budget so runs are
    independent.
    """
    OUTPUT_DIRECTORY = './output'
    N = 80
    fill = [2] * N  # each of the N bit positions takes values in {0, 1}
    ranges = array('i', fill)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    max_iter = 5000
    outfile = OUTPUT_DIRECTORY + '/count_ones_{}_log.csv'

    # Randomized Hill Climber
    filename = outfile.format('rhc')
    with open(filename, 'w') as f:
        f.write('iteration,fitness,time\n')
    for it in range(0, max_iter, 10):
        rhc = RandomizedHillClimbing(hcp)
        fit = FixedIterationTrainer(rhc, it)
        start_time = time.clock()
        fit.train()
        elapsed_time = time.clock() - start_time
        score = ef.value(rhc.getOptimal())
        data = '{},{},{}\n'.format(it, score, elapsed_time)
        print(data)
        with open(filename, 'a') as f:
            f.write(data)

    # Simulated Annealing: sweep the cooling factor.
    filename = outfile.format('sa')
    with open(filename, 'w') as f:
        f.write('iteration,cooling_value,fitness,time\n')
    for cooling_value in (.19, .38, .76, .95):
        for it in range(0, max_iter, 10):
            sa = SimulatedAnnealing(100, cooling_value, hcp)
            fit = FixedIterationTrainer(sa, it)
            start_time = time.clock()
            fit.train()
            elapsed_time = time.clock() - start_time
            score = ef.value(sa.getOptimal())
            data = '{},{},{},{}\n'.format(it, cooling_value, score, elapsed_time)
            print(data)
            with open(filename, 'a') as f:
                f.write(data)

    # Genetic Algorithm: sweep mate/mutate counts at fixed population.
    filename = outfile.format('ga')
    with open(filename, 'w') as f:
        f.write('iteration,population_size,to_mate,to_mutate,fitness,time\n')
    for population_size, to_mate, to_mutate in itertools.product([20], [4, 8, 16, 20], [0, 2, 4, 6]):
        for it in range(0, max_iter, 10):
            ga = StandardGeneticAlgorithm(population_size, to_mate, to_mutate, gap)
            fit = FixedIterationTrainer(ga, it)
            start_time = time.clock()
            fit.train()
            elapsed_time = time.clock() - start_time
            score = ef.value(ga.getOptimal())
            data = '{},{},{},{},{},{}\n'.format(it, population_size, to_mate, to_mutate, score, elapsed_time)
            print(data)
            with open(filename, 'a') as f:
                f.write(data)

    # MIMIC: sweep the dependency-tree prior m.
    filename = outfile.format('mm')
    with open(filename, 'w') as f:
        f.write('iterations,samples,to_keep,m,fitness,time\n')
    for samples, to_keep, m in itertools.product([50], [10], [0.1, 0.3, 0.5, 0.7, 0.9]):
        for it in range(0, 500, 10):
            df = DiscreteDependencyTree(m, ranges)
            pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
            # Bug fix: the keep count was hard-coded to 20 while `to_keep`
            # was being swept and logged, so the CSV misreported the actual
            # hyperparameter. Use the swept value.
            mm = MIMIC(samples, to_keep, pop)
            fit = FixedIterationTrainer(mm, it)
            start_time = time.clock()
            fit.train()
            elapsed_time = time.clock() - start_time
            score = ef.value(mm.getOptimal())
            data = '{},{},{},{},{},{}\n'.format(it, samples, to_keep, m, score, elapsed_time)
            print(data)
            with open(filename, 'a') as f:
                f.write(data)
Exemplo n.º 16
0
    # Fragment: interior of a per-trial loop defined outside this chunk
    # (fname, ranges, maxIters come from the enclosing scope). Runs RHC and
    # appends per-iteration rows to the trial's CSV.
    with open(fname, 'w') as f:
        f.write('iterations,fitness,time,fevals\n')
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # NOTE(review): mixes bare clock() with time.clock(); works only if
        # both `import time` and `from time import clock` are in effect.
        start = clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = i  # uses the iteration count as a feval proxy here
        score = ef.value(rhc.getOptimal())
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        print st
        with open(fname, 'a') as f:
            f.write(st)

# SA: one trial per (numTrials x cooling-exponent) combination.
for t in range(numTrials):
    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
        fname = outfile.format('SA{}'.format(CE), str(t + 1))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ef = CountOnesEvaluationFunction()
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
        hcp = GenericHillClimbingProblem(ef, odd, nf)
# NOTE(review): fragment is cut off — the SA trial body continues beyond
# this chunk.
Exemplo n.º 17
0
# Fragment: builds the CountOnes problems from globals defined outside this
# chunk (ef, odd, nf, mf, cf, ranges, runs, N), then averages RHC over
# `runs` independent runs; an SA sweep begins at the end.
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)


iters = 5000

t0 = time.time()
calls = []
results = []
for _ in range(runs):
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, iters)
    fitness = fit.train()
    results.append(ef.value(rhc.getOptimal()))
    calls.append(ef.getTotalCalls())    
    ef.clearCount()  # reset the shared feval counter between runs
print "RHC, average results , " + str(sum(results)/float(runs)) + ", countones-%d.txt" % N
print "RHC, average feval calls , " + str(sum(calls)/float(runs)) + ", countones-%d.txt" % N
t1 = time.time() - t0
print "RHC, average time , " + str(float(t1)/runs) + ", countones-%d.txt" % N


t0 = time.time()
calls = []
results = []
for _ in range(runs):
    sa = SimulatedAnnealing(1e10, .95, hcp)
    fit = FixedIterationTrainer(sa, iters)
    fitness = fit.train()
# NOTE(review): fragment is cut off — the SA loop body continues beyond
# this chunk.
Exemplo n.º 18
0
    # Fragment: interior of a repetition loop defined outside this chunk.
    # Times one training step of each optimizer and appends train-time and
    # fitness into per-algorithm result dicts keyed by `repetition`.
    start_sa = time.time()
    fit_sa.train()
    end_sa = time.time()
    
    start_ga = time.time()
    fit_ga.train()
    end_ga = time.time()
    
    start_mimic = time.time()
    fit_mimic.train()
    end_mimic = time.time()

    # Result handling
    last_train_time_rhc = end_rhc - start_rhc
    rhc_train_time[repetition].append(last_train_time_rhc)
    rhc_accuracy[repetition].append(ef.value(rhc.getOptimal()))

    last_train_time_sa = end_sa - start_sa
    sa_train_time[repetition].append(last_train_time_sa)
    sa_accuracy[repetition].append(ef.value(sa.getOptimal()))

    last_train_time_ga = end_ga - start_ga
    ga_train_time[repetition].append(last_train_time_ga)
    ga_accuracy[repetition].append(ef.value(ga.getOptimal()))

    last_train_time_mimic = end_mimic - start_mimic
    mimic_train_time[repetition].append(last_train_time_mimic)
    mimic_accuracy[repetition].append(ef.value(mimic.getOptimal()))

    while current_iteration_count <= MAX_ITERATION - ITERATION_STEP:
        print("Computing for %d iterations" % (current_iteration_count + ITERATION_STEP))
# --- Separate fragment: per-step RHC/SA logging to a text file. Depends on
# hill_climbing_problem, ef and genetic_problem from outside this chunk.
from time import time
f = open("experiments/results/countones_optimal_1000.txt", "w")

f.write("starting RHC\n")
rhc = RandomizedHillClimbing(hill_climbing_problem)
score = 0
iters = 0
t0 = time()

while iters < 60000:
    score = rhc.train()
    # NOTE(review): iters and score are concatenated with no separator, so
    # the log lines are ambiguous (e.g. "12" could be iter 1 score 2).
    f.write(str(iters) + str(score) +"\n")
    iters += 1


print "RHC: " + str(ef.value(rhc.getOptimal())), "time taken", time() - t0, "Iterations:", iters

f.write("starting SA\n")
sa = SimulatedAnnealing(1E13, .95, hill_climbing_problem)
t0 = time()
iters = 0
score = 0

while iters < 60000:
    score = sa.train()
    # NOTE(review): missing "\n" here — all SA records run together on one
    # line (and again no separator between iters and score).
    f.write(str(iters) + str(score))
    iters += 1

print "SA: " + str(ef.value(sa.getOptimal())), "time taken", time() - t0, "Iterations", iters

ga = StandardGeneticAlgorithm(200, 100, 10, genetic_problem)
Exemplo n.º 20
0
# Fragment: GA averaging over `runs` runs (ranges, runs, N, ga_pop, ga_keep,
# ga_mut, co_type come from outside this chunk).
ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

t0 = time.time()
calls = []
results = []
for _ in range(runs):
    ga = StandardGeneticAlgorithm(ga_pop, ga_keep, ga_mut, gap)
    fit = FixedIterationTrainer(ga, 150)
    fitness = fit.train()
    results.append(ef.value(ga.getOptimal()))
    calls.append(ef.getTotalCalls())
    ef.clearCount()  # reset the shared feval counter between runs
print "GA, average results , " + str(
    sum(results) / float(runs)) + ", countones_ga_%d-%d-%d-%d-%d.txt" % (
        N, ga_pop, co_type, ga_keep, ga_mut)
print "GA, average feval calls , " + str(
    sum(calls) / float(runs)) + ", countones_ga_%d-%d-%d-%d-%d.txt" % (
        N, ga_pop, co_type, ga_keep, ga_mut)
t1 = time.time() - t0
print "GA, average time , " + str(
    t1 / float(runs)) + ", countones_ga_%d-%d-%d-%d-%d.txt" % (
        N, ga_pop, co_type, ga_keep, ga_mut)
# --- Separate fragment: long-budget one-shot runs of RHC/SA/GA with timing.
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# NOTE(review): this shadows the `time` module with the time() function for
# the rest of the script.
from time import time

rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 600000)
t0 = time()
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal())), "time taken", time() - t0

sa = SimulatedAnnealing(1E11, .95, hcp)
fit = FixedIterationTrainer(sa, 600000)

t0 = time()
fit.train()
print "SA: " + str(ef.value(sa.getOptimal())), "time taken", time() - t0

ga = StandardGeneticAlgorithm(200, 100, 10, gap)
fit = FixedIterationTrainer(ga, 20000)

t0 = time()
fit.train()

print "GA: " + str(ef.value(ga.getOptimal())), "time taken", time() - t0