Пример #1
0
def run_mimic(t, samples, keep, m):
    """Run one MIMIC trial on the flip flop problem and log progress to CSV.

    Args:
        t: Zero-based trial index (only used to build the output file name).
        samples: Number of samples MIMIC draws per iteration.
        keep: Number of top samples kept to refit the distribution.
        m: Dependency-tree threshold passed to DiscreteDependencyTree.

    Relies on module-level globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('MIMIC{}_{}_{}'.format(samples, keep, m),
                           str(t + 1))
    base.write_header(fname)
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    # Only the probabilistic-optimization pieces are needed for MIMIC; the
    # original also built unused neighbor/mutation/crossover/GA objects.
    df = DiscreteDependencyTree(m, ranges)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    mimic = MIMIC(samples, keep, pop)
    fit = FixedIterationTrainer(mimic, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # Use clock() consistently; the original mixed clock() and
        # time.clock(), which requires two separate import styles.
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        # Snapshot the evaluation count before scoring, then undo the one
        # extra evaluation that ef.value() performs below.
        fevals = ef.fevals
        score = ef.value(mimic.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Пример #2
0
def run_sa(t, CE):
    """Run one simulated annealing trial on flip flop and log progress to CSV.

    Args:
        t: Zero-based trial index (only used to build the output file name).
        CE: Cooling exponent passed to SimulatedAnnealing.

    Relies on module-level globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('SA{}'.format(CE), str(t + 1))
    with open(fname, 'a+') as f:
        # BUG FIX: in 'a+' mode the position starts at end-of-file on most
        # platforms, so read() returned '' and the header was appended on
        # every call. Rewind before checking for an existing header.
        f.seek(0)
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    sa = SimulatedAnnealing(1E10, CE, hcp)
    fit = FixedIterationTrainer(sa, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # Use clock() consistently (original mixed clock() / time.clock()).
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        # Snapshot fevals before scoring, then undo the extra ef.value() call.
        fevals = ef.fevals
        score = ef.value(sa.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Пример #3
0
def run_ga(t, pop, mate, mutate):
    """Run one genetic algorithm trial on flip flop and log progress to CSV.

    Args:
        t: Zero-based trial index (only used to build the output file name).
        pop: GA population size.
        mate: Number of individuals mated per generation.
        mutate: Number of individuals mutated per generation.

    Relies on module-level globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    with open(fname, 'a+') as f:
        # BUG FIX: in 'a+' mode the position starts at end-of-file on most
        # platforms, so read() returned '' and the header was appended on
        # every call. Rewind before checking for an existing header.
        f.seek(0)
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    # The GA only needs mutation + crossover; the original also built an
    # unused neighbor function.
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # Use clock() consistently (original mixed clock() / time.clock()).
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        # Snapshot fevals before scoring, then undo the extra ef.value() call.
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Пример #4
0
    def get_ef(self):
        """Build the ranges array and evaluation function for a flip flop
        problem sized by this instance's ``N``.

        Returns:
            ranges (array): 'i'-typed array with one entry of value 2 per bit.
            ef (FlipFlopEvaluationFunction): Fresh evaluation function.
        """
        ranges = array('i', [2 for _ in range(self.N)])
        ef = FlipFlopEvaluationFunction()
        return ranges, ef
Пример #5
0
def run_rhc(t):
    """Run one randomized hill climbing trial on flip flop and log to CSV.

    Args:
        t: Zero-based trial index (only used to build the output file name).

    Relies on module-level globals: outfile, base, ranges, maxIters. Unlike
    the SA/GA runners, the CSV header is written by the caller, not here.
    """
    fname = outfile.format('RHC', str(t + 1))
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # Use clock() consistently; the original mixed clock() and
        # time.clock(), which requires two separate import styles.
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        # Snapshot fevals before scoring, then undo the extra ef.value() call.
        fevals = ef.fevals
        score = ef.value(rhc.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Пример #6
0
def flipflop():
    """Run RHC, SA, GA and MIMIC on an 80-bit flip flop problem via the
    generic runner helpers (rhc_generic, sa_generic, ga_generic,
    mimic_discrete — defined elsewhere in this project)."""
    N = 80
    # Each position of the bit string can take one of two values.
    fill = [2] * N
    ranges = array('i', fill)

    # Shared building blocks handed to each runner.
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()

    # NOTE(review): positional args presumably are (name, ef, dist, ...,
    # budget, iterations, runs, trials / hyperparameter grids) — confirm
    # against the *_generic definitions, which are outside this view.
    rhc_generic   ("FFloprhc80",   ef, odd, nf,      10.0, 10000, 10, 5)
    sa_generic    ("FFlopsa80",    ef, odd, nf,      10.0, 10000, 10, 5, ([1E12, 1E6], [0.999, 0.99, 0.95]))
    ga_generic    ("FFlopga80",    ef, odd, mf, cf,  50.0, 10000, 10, 1, ([2000, 200], [0.5, 0.25], [0.25, 0.1, 0.02]))
    mimic_discrete("FFlopmimic80", ef, odd, ranges, 300.0, 10000, 10, 1, ([200], [100], [0.1, 0.5, 0.9]))

    print "FFlop all done"
Пример #7
0
from base import *

# Adapted from https://github.com/JonathanTay/CS-7641-assignment-2/blob/master/flipflop.py
"""
Commandline parameter(s):
   none
"""

# Experiment configuration: 1000-bit problem, 3001 iterations, 5 trials.
N = 1000
maxIters = 3001
numTrials = 5
fill = [2] * N  # each bit position takes one of two values
ranges = array('i', fill)
outfile = OUTPUT_DIRECTORY + '/FLIPFLOP/FLIPFLOP_{}_{}_LOG.csv'
# Shared problem components reused by the algorithm runners.
ef = FlipFlopEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# RHC
# Write a fresh CSV header for each trial, then rebuild the evaluation
# function. NOTE(review): the rest of this loop body is truncated in this
# excerpt — the remainder continues beyond this view.
for t in range(numTrials):
    fname = outfile.format('RHC', str(t + 1))
    with open(fname, 'w') as f:
        f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
Пример #8
0
import shared.FixedIterationTrainer as FixedIterationTrainer

from array import array
"""
Commandline parameter(s):
   none
"""

# 80-bit flip flop instance.
N = 80

fill = [2] * N  # each bit position takes one of two values
ranges = array('i', fill)

ITERATIONS = 5000

# Shared problem components for all four optimizers.
ef = FlipFlopEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# One instance of each algorithm over the shared problem definitions.
rhc = RandomizedHillClimbing(hcp)
sa = SimulatedAnnealing(1E11, .95, hcp)
ga = StandardGeneticAlgorithm(200, 100, 10, gap)
mimic = MIMIC(200, 20, pop)

# NOTE(review): handle opened without a context manager; it is presumably
# closed further down, past the end of this excerpt — confirm.
rhc_f = open('out/op/flipflop/rhc.csv', 'w')
Пример #9
0
from array import array
from time import clock  # use clock instead of typical time because better resolution
import os
import itertools
# import pandas as pd
# import numpy as np
"""
Commandline parameter(s):
   none
"""

# 80-bit flip flop instance.
N = 80
fill = [2] * N  # each bit position takes one of two values
ranges = array('i', fill)

# Shared problem components for all optimizers.
fff = FlipFlopEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(fff, odd, nf)
gap = GenericGeneticAlgorithmProblem(fff, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(fff, odd, df)

max_iterations = 3500
num_iterations = 10


# Merge two dicts into a new one. NOTE(review): body is truncated in this
# excerpt; presumably it updates z with y's entries and returns z — confirm.
def merge_two_dicts(x, y):
    z = x.copy()  # start with x's keys and values
Пример #10
0
Commandline parameter(s):
    none
"""

# set N value.  This is the number of points
N = 50
fill = [2] * N
random = Random()  # nextDouble() below suggests a Java Random (Jython) — confirm
ranges = array('i', fill)

# Random 2-D points in [0,1)^2. NOTE(review): these are not used by the flip
# flop components below — presumably left over from a TSP-style template.
points = [[0 for x in xrange(2)] for x in xrange(N)]
for i in range(0, len(points)):
    points[i][0] = random.nextDouble()
    points[i][1] = random.nextDouble()

# Shared problem components.
ef = FlipFlopEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)

# repeat a few times to get an average?
trials = 10 # more?

# Per-algorithm result accumulators.
hill_climbing = []
annealing = []
genetic = []
mimic_data = []
Пример #11
0
def run_all_2(N=200, T=40, fout=None):
    """Run RHC, SA, GA and MIMIC on an N-bit flip flop problem.

    Args:
        N: Bit-string length; also reported as the problem difficulty.
        T: Unused here; kept for signature compatibility with the
           four-peaks variant of this runner.
        fout: Optional csv.writer-like object. When provided, one summary
            row per algorithm is written; when None, results are only
            printed (the original crashed with AttributeError in that case).
    """
    problem = 'flipflop'
    maxEpochs = 10**5
    maxTime = 300  # 5 minutes

    fill = [2] * N  # each bit position takes one of two values
    ranges = array('i', fill)

    ef = FlipFlopEvaluationFunction()

    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    def run_algo(alg, fit, label, difficulty, iters):
        # Train until the optimum is reached, the score stops changing, or
        # the wall-clock budget is exhausted; then record one summary row.
        trainTimes = [0.]
        trainTime = []
        scoreChange = [0.]
        stuckCount = 10**3
        prev = 0.
        for epoch in range(0, maxEpochs, 1):
            st = time.clock()
            fit.train()
            et = time.clock()

            trainTimes.append(trainTimes[-1] + (et - st))
            trainTime.append((et - st))
            rollingMean = 10
            avgTime = (math.fsum(trainTime[-rollingMean:]) / float(rollingMean))

            score = ef.value(alg.getOptimal())

            trialData = [problem, difficulty, label, score, epoch, trainTimes[-1], avgTime, iters]
            print(trialData)

            # Flip flop's maximum score for an N-bit string is N - 1.
            optimum = (difficulty - 1)
            if score >= optimum: break

            # Stop when the score has not moved at all over the last
            # stuckCount epochs.
            scoreChange.append(abs(score - prev))
            prev = score
            scoreChange = scoreChange[-stuckCount:]
            if max(scoreChange) == 0: break

            if trainTimes[-1] > maxTime: break

        # BUG FIX: guard the default fout=None — the original called
        # fout.writerow unconditionally and crashed without a writer.
        if fout is not None:
            fout.writerow(trialData)

    iters = 10
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, iters)
    run_algo(rhc, fit, 'RHC', N, iters)

    iters = 10
    startTemp = 1E10
    coolingFactor = .99
    sa = SimulatedAnnealing(startTemp, coolingFactor, hcp)
    fit = FixedIterationTrainer(sa, iters)
    run_algo(sa, fit, 'SA', N, iters)

    iters = 10
    population = 300
    mates = 40
    mutations = 10
    ga = StandardGeneticAlgorithm(population, mates, mutations, gap)
    fit = FixedIterationTrainer(ga, iters)
    run_algo(ga, fit, 'GA', N, iters)

    iters = 10
    samples = 200
    keep = 5
    mimic = MIMIC(samples, keep, pop)
    fit = FixedIterationTrainer(mimic, iters)
    run_algo(mimic, fit, 'MIMIC', N, iters)
Пример #12
0
# Number of repeats to average over at each iteration budget.
runs = 5

#RHC
output_directory = "Results/Small/FlipFlop_RHC.csv"
with open(output_directory, 'w') as f:
    f.write('iterations,fitness,time\n')


# Accumulate total fitness and time over `runs` repeats per budget.
# NOTE(review): iteration_list and N are defined outside this excerpt, and
# the loop body is truncated below — averaging/output continues past this view.
for i in range(len(iteration_list)):
    iteration = iteration_list[i]
    rhc_total = 0
    rhc_time = 0

    for x in range(runs):
        # Rebuild the problem fresh for every run.
        ranges = array('i', [2] * N)
        fitness = FlipFlopEvaluationFunction()
        discrete_dist = DiscreteUniformDistribution(ranges)
        discrete_neighbor = DiscreteChangeOneNeighbor(ranges)
        discrete_mutation = DiscreteChangeOneMutation(ranges)
        crossover = SCO()
        discrete_dependency = DiscreteDependencyTree(.1, ranges)
        hill_problem = GHC(fitness, discrete_dist, discrete_neighbor)

        # Time one full RHC training run at this iteration budget.
        start = time.clock()
        rhc_problem = RHC(hill_problem)
        fit = FixedIterationTrainer(rhc_problem, iteration)
        fit.train()
        end = time.clock()
        full_time = end - start
        rhc_total += fitness.value(rhc_problem.getOptimal())
        rhc_time += full_time