Example #1
def run_sa(t, CE):
    fname = outfile.format('SA{}'.format(CE), str(t + 1))
    with open(fname, 'a+') as f:
        f.seek(0)  # 'a+' opens positioned at EOF; rewind before reading
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    sa = SimulatedAnnealing(1E10, CE, hcp)
    fit = FixedIterationTrainer(sa, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(sa.getOptimal())
        ef.fevals -= 1  # discount the scoring call above from the eval counter
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        # print st
        base.write_to_file(fname, st)
    return
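These helpers rely on module-level setup that the excerpt does not show. Below is a minimal sketch of that preamble, assuming the standard ABAGAIL package layout under Jython; the constants (N, maxIters, numTrials) and the outfile template are illustrative, and the fevals counter on the evaluation function and the base helper module come from the surrounding project rather than stock ABAGAIL.

# Hypothetical preamble assumed by run_sa and the helpers below
# (requires ABAGAIL.jar on the Jython classpath).
import time
from time import clock
from array import array
from itertools import product

from opt.example import FlipFlopEvaluationFunction
from dist import DiscreteUniformDistribution, DiscreteDependencyTree
from opt import (DiscreteChangeOneNeighbor, GenericHillClimbingProblem,
                 RandomizedHillClimbing, SimulatedAnnealing)
from opt.ga import (DiscreteChangeOneMutation, SingleCrossOver,
                    GenericGeneticAlgorithmProblem, StandardGeneticAlgorithm)
from opt.prob import MIMIC, GenericProbabilisticOptimizationProblem
from shared import FixedIterationTrainer

N = 1000                            # bit-string length (illustrative)
maxIters = 3000                     # iteration budget (illustrative)
numTrials = 5                       # repeated trials (illustrative)
ranges = array('i', [2] * N)        # each position is binary
outfile = 'FLIPFLOP_{}_LOG_{}.csv'  # filled with algorithm tag, trial number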
Example #2
def run_mimic(t, samples, keep, m):
    fname = outfile.format('MIMIC{}_{}_{}'.format(samples, keep, m),
                           str(t + 1))
    base.write_header(fname)
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    df = DiscreteDependencyTree(m, ranges)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    mimic = MIMIC(samples, keep, pop)
    fit = FixedIterationTrainer(mimic, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(mimic.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        # print st
        base.write_to_file(fname, st)
    return
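Callers typically drive these per-trial helpers with an outer sweep, mirroring the inline loops in the later examples. A minimal sketch, with the grids taken verbatim from those loops:

# Illustrative driver: one run per (trial, hyperparameter) combination.
for t in range(numTrials):
    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:   # SA cooling rates
        run_sa(t, CE)
    for samples, keep, m in product([100], [50], [0.7]):
        run_mimic(t, samples, keep, m)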
Example #3
def run_ga(t, pop, mate, mutate):
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    with open(fname, 'a+') as f:
        f.seek(0)  # 'a+' opens positioned at EOF; rewind before reading
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        # print st
        base.write_to_file(fname, st)
    return
Example #4
def run_rhc(t):
    fname = outfile.format('RHC', str(t + 1))
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(rhc.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        # print st
        base.write_to_file(fname, st)
    return
Example #5
    with open(fname, 'w') as f:
        f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(rhc.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        print(st)
        with open(fname, 'a') as f:
            f.write(st)

# SA
for t in range(numTrials):
    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
        fname = outfile.format('SA{}'.format(CE), str(t + 1))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ef = FlipFlopEvaluationFunction()
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
Example #6
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
sa = SimulatedAnnealing(1E11, .95, hcp)
ga = StandardGeneticAlgorithm(200, 100, 10, gap)
mimic = MIMIC(200, 20, pop)

rhc_f = open('out/op/flipflop/rhc.csv', 'w')
sa_f = open('out/op/flipflop/sa.csv', 'w')
ga_f = open('out/op/flipflop/ga.csv', 'w')
mimic_f = open('out/op/flipflop/mimic.csv', 'w')

for i in range(ITERATIONS):
    rhc.train()
    rhc_fitness = ef.value(rhc.getOptimal())
    rhc_f.write('{},{}\n'.format(i, rhc_fitness))

    sa.train()
    sa_fitness = ef.value(sa.getOptimal())
    sa_f.write('{},{}\n'.format(i, sa_fitness))

    ga.train()
    ga_fitness = ef.value(ga.getOptimal())
    ga_f.write('{},{}\n'.format(i, ga_fitness))

    mimic.train()
    mimic_fitness = ef.value(mimic.getOptimal())
    mimic_f.write('{},{}\n'.format(i, mimic_fitness))

rhc_f.close()
sa_f.close()
ga_f.close()
mimic_f.close()

Example #7
# MIMIC
for t in range(numTrials):
    for samples, keep, m in product([100], [50], [0.7]):
        fname = outfile.format('MIMIC{}_{}_{}'.format(samples, keep, m), str(t + 1))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ef = FlipFlopEvaluationFunction()
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
        mf = DiscreteChangeOneMutation(ranges)
        cf = SingleCrossOver()
        gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
        df = DiscreteDependencyTree(m, ranges)
        pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
        mimic = MIMIC(samples, keep, pop)
        fit = FixedIterationTrainer(mimic, 10)
        times = [0]
        for i in range(0, maxIters, 10):
            start = clock()
            fit.train()
            elapsed = clock() - start
            times.append(times[-1] + elapsed)
            # fevals = ef.fevals
            score = ef.value(mimic.getOptimal())
            # ef.fevals -= 1
            st = '{},{},{}\n'.format(i, score, times[-1])
            print st
            with open(fname, 'a') as f:
                f.write(st)
Example #8
        print "Examining " + name + " trainer..."

        # if len(hypers.values()) > 0:
        for exper in itertools.product(*values):

            # here implement hyper params
            rep_times = []
            for r in range(reps):
                # create a new trainer each time
                row = {}
                for key, value in zip(keys, exper):
                    row[key] = value
                print(row)
                fit = factory(row)
                # have to reset func eval per run
                fff.func_evals = 0
                for i in range(0, max_iterations, num_iterations):
                    start = clock()
                    fit.train()
                    stop = clock()
                    rep_times.append(stop - start)
                    func_eval = fff.func_evals
                    fitness = fff.value(fit.trainer.getOptimal())
                    # log
                    line = [name, r, i, fitness,
                            rep_times[-1], func_eval] + list(exper)
                    line = [str(x) for x in line]
                    out.write(','.join(line) + os.linesep)

        print "Done " + name + " trainer..."
Example #9
hill_climbing_output = "flipflop_hill_climbing.csv"
annealing_output = "flipflop_annealing.csv"
genetic_output = "flipflop_genetic.csv"
mimic_output = "flipflop_mimic.csv"

# HILL CLIMBING
for i in range(trials):
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 200000)

    start = clock()
    fit.train()
    end = clock()
    total_time = end - start
    max_fit = ef.value(rhc.getOptimal())
    time_optimum = [total_time, max_fit]
    hill_climbing.append(time_optimum)
    print "RHC Optimum: " + str(ef.value(rhc.getOptimal()))


# ANNEALING
for i in range(trials):
    sa = SimulatedAnnealing(1E12, .999, hcp)
    fit = FixedIterationTrainer(sa, 200000)
    start = clock()
    fit.train()
    end = clock()
    total_time = end - start
    max_fit = ef.value(sa.getOptimal())
Example #10
        ranges = array('i', [2] * N)
        fitness = FlipFlopEvaluationFunction()
        discrete_dist = DiscreteUniformDistribution(ranges)
        discrete_neighbor = DiscreteChangeOneNeighbor(ranges)
        discrete_mutation = DiscreteChangeOneMutation(ranges)
        crossover = SCO()
        discrete_dependency = DiscreteDependencyTree(.1, ranges)
        hill_problem = GHC(fitness, discrete_dist, discrete_neighbor)

        start = time.clock()
        rhc_problem = RHC(hill_problem)
        fit = FixedIterationTrainer(rhc_problem, iteration)
        fit.train()
        end = time.clock()
        full_time = end - start
        rhc_total += fitness.value(rhc_problem.getOptimal())
        rhc_time += full_time

    rhc_total_avg = rhc_total/runs
    rhc_time_avg = rhc_time/runs

    data = '{},{},{}\n'.format(iteration, rhc_total_avg, rhc_time_avg)
    print(data)
    with open(output_directory, 'a') as f:
        f.write(data)


#SA
output_directory = "Results/Small/FlipFlop_SA.csv"
with open(output_directory, 'w') as f:
    f.write('iterations,fitness,time\n')
Example #11
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        print ef.fevals
        fevals = ef.fevals
        x = rhc.getOptimal()
        #print x
        score = ef.value(x)
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        print st
        with open(fname, 'a') as f:
            f.write(st)
#sys.exit(1)
# SA
for t in range(numTrials):
    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
        fname = outfile.format('SA{}'.format(CE), str(t + 1))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ef = FlipFlopEvaluationFunction()
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)