Example #1
0
def run_mimic(t, samples, keep, m):
    """Run one MIMIC trial on the continuous-peaks problem.

    Args:
        t: Trial index (0-based); t + 1 appears in the output file name.
        samples: Number of samples MIMIC draws per iteration.
        keep: Number of samples kept per iteration.
        m: Parameter passed to DiscreteDependencyTree.

    Appends one '<iter>,<fitness>,<cumulative time>,<fevals>' CSV row
    every 10 iterations via base.write_to_file. Relies on module-level
    globals: outfile, T, ranges, maxIters, base.
    """
    fname = outfile.format('MIMIC{}_{}_{}'.format(samples, keep, m),
                           str(t + 1))
    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    df = DiscreteDependencyTree(m, ranges)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    mimic = MIMIC(samples, keep, pop)
    fit = FixedIterationTrainer(mimic, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # BUG FIX: was `time.clock() - start` while `start` came from the
        # bare `clock()` import; use the same timer for both endpoints.
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(mimic.getOptimal())
        ef.fevals -= 1  # value() above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Example #2
0
def run_ga(t, pop, mate, mutate):
    """Run one standard-GA trial on the continuous-peaks problem.

    Args:
        t: Trial index (0-based); t + 1 appears in the output file name.
        pop: GA population size.
        mate: Number of individuals mated per generation.
        mutate: Number of individuals mutated per generation.

    Writes a CSV header once per file, then appends one row every 10
    iterations via base.write_to_file. Relies on module-level globals:
    outfile, T, ranges, maxIters, base.
    """
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    with open(fname, 'a+') as f:
        # BUG FIX: in 'a+' mode the stream starts at EOF, so read()
        # returned '' and the header was re-appended on every call;
        # rewind before checking for an existing header.
        f.seek(0)
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # BUG FIX: use the same timer (bare clock()) for both endpoints.
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
        ef.fevals -= 1  # value() above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Example #3
0
def run_sa(t, CE):
    """Run one simulated-annealing trial on the continuous-peaks problem.

    Args:
        t: Trial index (0-based); t + 1 appears in the output file name.
        CE: Cooling exponent passed to SimulatedAnnealing (start temp 1E10).

    Writes a CSV header once per file, then appends one row every 10
    iterations via base.write_to_file. Relies on module-level globals:
    outfile, T, ranges, maxIters, base.
    """
    fname = outfile.format('SA{}'.format(CE), str(t + 1))
    with open(fname, 'a+') as f:
        # BUG FIX: in 'a+' mode the stream starts at EOF, so read()
        # returned '' and the header was re-appended on every call;
        # rewind before checking for an existing header.
        f.seek(0)
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    sa = SimulatedAnnealing(1E10, CE, hcp)
    fit = FixedIterationTrainer(sa, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # BUG FIX: use the same timer (bare clock()) for both endpoints.
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(sa.getOptimal())
        ef.fevals -= 1  # value() above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
Example #4
0
def solveit(oaname, params):
    N = 60
    T = N / 10
    fill = [2] * N
    ranges = array('i', fill)
    iterations = 10000
    tryi = 1

    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    #  fit = FixedIterationTrainer(rhc, 200000)
    #  fit.train()

    if oaname == 'RHC':
        iterations = int(params[0])
        tryi = int(params[1])
        oa = RandomizedHillClimbing(hcp)
    if oaname == 'SA':
        oa = SimulatedAnnealing(float(params[0]), float(params[1]), hcp)
    if oaname == 'GA':
        oa = StandardGeneticAlgorithm(int(params[0]), int(params[1]),
                                      int(params[2]), gap)
    if oaname == 'MMC':
        oa = MIMIC(int(params[0]), int(params[1]), pop)

    print "Running %s using %s for %d iterations, try %d" % (
        oaname, ','.join(params), iterations, tryi)
    print "=" * 20
    starttime = timeit.default_timer()
    output = []
    for i in range(iterations):
        oa.train()
        if i % 10 == 0:
            optimal = oa.getOptimal()
            score = ef.value(optimal)
            elapsed = float(timeit.default_timer() - starttime)
            output.append([str(i), str(score), str(elapsed)])

    print 'score: %.3f' % score
    print 'train time: %.3f secs' % (int(timeit.default_timer() - starttime))

    scsv = 'cp-%s-%s.csv' % (oaname, '-'.join(params))
    print "Saving to %s" % (scsv),
    with open(scsv, 'w') as csvf:
        writer = csv.writer(csvf)
        for row in output:
            writer.writerow(row)
    print "saved."
    print "=" * 20
Example #5
0
    def get_ef(self):
        """Build a fresh continuous peaks problem instance.

        Returns:
            tuple: ``(ranges, ef)`` where ``ranges`` is an ``array('i')``
            of 2s with length ``self.N`` and ``ef`` is the
            ContinuousPeaksEvaluationFunction built from ``self.T``.

        """
        ranges = array('i', [2] * self.N)
        ef = ContinuousPeaksEvaluationFunction(self.T)
        return ranges, ef
Example #6
0
def run_rhc(t):
    """Run one randomized-hill-climbing trial on continuous peaks.

    Args:
        t: Trial index (0-based); t + 1 appears in the output file name.

    Appends one '<iter>,<fitness>,<cumulative time>,<fevals>' CSV row
    every 10 iterations via base.write_to_file. Relies on module-level
    globals: outfile, T, ranges, maxIters, base.
    """
    fname = outfile.format('RHC', str(t + 1))
    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = clock()
        fit.train()
        # BUG FIX: was `time.clock() - start` while `start` came from the
        # bare `clock()` import; use the same timer for both endpoints.
        elapsed = clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(rhc.getOptimal())
        ef.fevals -= 1  # value() above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)

    return
# NOTE(review): fragment — N is defined outside this snippet.
T = N / 10
fill = [2] * N
ranges = array('i', fill)

maxIters = 2500
numTrials = 5

# module_path = os.path.dirname(os.path.realpath(__file__))
# output_path = os.path.join(module_path, 'output_file')
# outfile = module_path + '/CONTPEAKS/CONTPEAKS_{}_{}_LOG.csv'
# outfile = output_path + '\CONTPEAKS\CONTPEAKS_{}_{}_LOG.csv'
# outfile = 'C:\\Users\\gablanco\\gatech\\assignment2\\CONTPEAKS\\CONTPEAKS_{}_{}_LOG.csv'

# Template filled with (algorithm tag, trial number) by the run_* helpers.
outfile = 'CONTPEAKS_{}_{}_LOG.csv'

# Shared problem components for the optimizers.
ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

#rhc = RandomizedHillClimbing(hcp)
#fit = FixedIterationTrainer(rhc, 200000)
#fit.train()
#print "RHC: " + str(ef.value(rhc.getOptimal()))

# RHC
Example #8
0
from array import array
from time import clock
from itertools import product
"""
Commandline parameter(s):
   none
"""

# 100-bit continuous peaks problem with fixed threshold T = 15.
N = 100
T = 15
maxIters = 5001
numTrials = 5
fill = [2] * N
ranges = array('i', fill)
# @ALG@ / @N@ placeholders are substituted per algorithm / trial below.
outfile = './CONTPEAKS/CONTPEAKS_T15_@ALG@_@N@_LOG.txt'
ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# RHC
# NOTE(review): fragment — the trial loop body is truncated below.
for t in range(numTrials):
    fname = outfile.replace('@ALG@', 'RHC').replace('@N@', str(t + 1))
    with open(fname, 'w') as f:
        f.write('iterations,fitness,time,fevals\n')
    ef = ContinuousPeaksEvaluationFunction(T)
# Random number generator */
# NOTE(review): fragment — SA_TEMPERATURE, SA_COOLING_FACTOR_pool and the
# `time` import are defined outside this snippet; the loop is truncated.
N = 60
T = N / 10
fill = [2] * N
ranges = array('i', fill)

cycle = 1  # number of repeated experiment cycles
n_iteration = 1000

# Per-cycle result buckets for simulated annealing.
sa_fitness = [[] for i in range(cycle)]
sa_training_time = [[] for i in range(cycle)]

for n in range(cycle):
    print("the %d th cycle" % (n + 1))

    # Fresh problem instance per cycle.
    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)

    # Sweep the cooling factor, fixed starting temperature.
    for SA_COOLING_FACTOR in SA_COOLING_FACTOR_pool:
        sa = SimulatedAnnealing(SA_TEMPERATURE, SA_COOLING_FACTOR, hcp)
        fit_sa = FixedIterationTrainer(sa, n_iteration)

        print("calculating for cooling rate = %f" % SA_COOLING_FACTOR)

        # Training
        start_sa = time.time()
        fit_sa.train()
        end_sa = time.time()
Example #10
0
# NOTE(review): fragment — nested_dict, num_trials, num_points, do_rhc,
# hc_iter, helpers, rhc_scores and rhc_times come from outside this
# snippet; the sweep body is truncated.
rhc_results = nested_dict()
sa_results = nested_dict()
ga_results = nested_dict()
mi_results = nested_dict()

# Two tasks: sweep params to figure out best settings, then run sweeps of iters, etc.
for trial_num in range(num_trials):
    print "On trial num: ", trial_num, " of total of ", num_trials
    for N in num_points:
        print "\tUsing num_points: ", N
        T = N / 10
        fill = [2] * N
        ranges = array('i', fill)

        # Fresh problem components per problem size.
        ef = ContinuousPeaksEvaluationFunction(T)
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
        mf = DiscreteChangeOneMutation(ranges)
        cf = SingleCrossOver()
        df = DiscreteDependencyTree(.1, ranges)
        hcp = GenericHillClimbingProblem(ef, odd, nf)
        gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
        pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

        if do_rhc:
            rhc = RandomizedHillClimbing(hcp)
            for iter in hc_iter:
                score, call_count, runtime = helpers.eval_algo(ef, rhc, iter)
                rhc_scores.append(score)
                rhc_times.append(runtime)
Example #11
0
# NOTE(review): fragment — requires the `os` and `time` imports from the
# original file; the per-N loop body is truncated below.
BASE_PATH = os.getcwd() + "/"
OUTFILE_BASE = BASE_PATH + 'out/countinouspeaks/'

if not os.path.exists(OUTFILE_BASE):
    os.makedirs(OUTFILE_BASE)

# Write one CSV header file per algorithm.
for algo in ["RHC", "MIMIC", "GA", "SA"]:
    with open(OUTFILE_BASE + algo + ".csv", 'w') as f:
        f.write("iterations,training_time,fitness\n")

# Sweep problem sizes N (Python 2 xrange).
for N in xrange(38000, 500000, 1000):
    T = N / 10
    fill = [2] * N
    ranges = array('i', fill)

    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    # Time a fixed 100-iteration RHC run for this N.
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 100)
    start = time.time()
    fit.train()
    end = time.time()
    training_time = end - start
# NOTE(review): fragment — params, identifier, iterations, and the csv and
# time imports come from outside this snippet; the loop never writes rows
# here because the tail is truncated.
for param in params['RHC']:

    output_filename = 'CP %s-%s.csv' % ('RHC', identifier['RHC'](param))
    csv_file = open(output_filename, 'w')
    fields = ['num_iterations', 'value', 'time']
    writer = csv.DictWriter(csv_file, fieldnames=fields)
    writer.writeheader()

    for it_count in iterations:
        N = 60
        T = N / 10
        fill = [2] * N
        ranges = array('i', fill)

        # Fresh problem components per iteration budget.
        ef = ContinuousPeaksEvaluationFunction(T)
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
        mf = DiscreteChangeOneMutation(ranges)
        cf = SingleCrossOver()
        df = DiscreteDependencyTree(.1, ranges)
        hcp = GenericHillClimbingProblem(ef, odd, nf)
        gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
        pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

        # Time a single RHC run of it_count iterations.
        start = time.time()
        rhc = RandomizedHillClimbing(hcp)
        fit = FixedIterationTrainer(rhc, it_count)
        fit.train()
        end = time.time()
        # print "Time -->", end - start
Example #13
0
from array import array
from time import clock  # use clock instead of typical time because better resolution
"""
Commandline parameter(s):
   none
"""

# 60-bit continuous peaks problem with threshold T = N / 10.
N = 60
T = N / 10
fill = [2] * N
ranges = array('i', fill)

max_iterations = 3500
num_iterations = 10

# Shared problem components for the optimizers defined elsewhere.
ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)


def merge_two_dicts(x, y):
    """Return a new dict combining x and y; y's entries win on key clashes.

    Neither input dict is modified.
    """
    merged = dict(x)   # shallow copy of x
    merged.update(y)   # overlay y's keys and values
    return merged
Example #14
0
    none
"""

# NOTE(review): fragment — the two lines above are the tail of a docstring
# opened outside this snippet. Random with nextDouble() is presumably
# Java's java.util.Random (Jython) — TODO confirm. The `points` grid is
# built but not used by the continuous peaks setup below.
# set N value.  This is the number of points
N = 50
fill = [2] * N
random = Random()
ranges = array('i', fill)
T = 20

points = [[0 for x in xrange(2)] for x in xrange(N)]
for i in range(0, len(points)):
    points[i][0] = random.nextDouble()
    points[i][1] = random.nextDouble()

# Shared problem components for the optimizers.
ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)

# repeat a few times to get an average?
trials = 10  # more?

# Result buckets, filled elsewhere in the original file.
hill_climbing = []
annealing = []
genetic = []
mimic_data = []
Example #15
0
# NOTE(review): fragment — cycle, T, ranges, and the SA/GA/MIMIC
# hyperparameter constants are defined outside this snippet; the loop
# body is truncated after the optimizers are constructed.
rhc_fitness = [[] for i in range(cycle)]
rhc_training_time = [[] for i in range(cycle)]

sa_fitness = [[] for i in range(cycle)]
sa_training_time = [[] for i in range(cycle)]

ga_fitness = [[] for i in range(cycle)]
ga_training_time = [[] for i in range(cycle)]

mimic_fitness = [[] for i in range(cycle)]
mimic_training_time = [[] for i in range(cycle)]

for n in range(cycle):
    print("the %d th cycle" % (n + 1))

    # Fresh problem components per cycle.
    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    # One optimizer of each kind sharing the same problem objects.
    rhc = RandomizedHillClimbing(hcp)
    sa = SimulatedAnnealing(SA_TEMPERATURE, SA_COOLING_FACTOR, hcp)
    ga = StandardGeneticAlgorithm(GA_POPULATION, GA_CROSSOVER, GA_MUTATION,
                                  gap)
    mimic = MIMIC(MIMIC_SAMPLES, MIMIC_TO_KEEP, pop)
# NOTE(review): fragment — N, T, the time import, and the SCO/GHC/RHC
# aliases come from outside this snippet; the averaging/reporting tail is
# truncated.
iteration_list = [10, 100, 500, 1000, 2500, 5000, 8000, 10000]
runs = 5

#RHC
output_directory = "Results/Small/Continuous_Peaks_RHC.csv"
with open(output_directory, 'w') as f:
    f.write('iterations,fitness,time\n')

for i in range(len(iteration_list)):
    iteration = iteration_list[i]
    rhc_total = 0
    rhc_time = 0

    # Accumulate fitness/time over `runs` independent restarts.
    for x in range(runs):
        ranges = array('i', [2] * N)
        fitness = ContinuousPeaksEvaluationFunction(T)
        discrete_dist = DiscreteUniformDistribution(ranges)
        discrete_neighbor = DiscreteChangeOneNeighbor(ranges)
        discrete_mutation = DiscreteChangeOneMutation(ranges)
        crossover = SCO()
        discrete_dependency = DiscreteDependencyTree(.1, ranges)
        hill_problem = GHC(fitness, discrete_dist, discrete_neighbor)

        # Time one RHC run of `iteration` iterations.
        start = time.clock()
        rhc_problem = RHC(hill_problem)
        fit = FixedIterationTrainer(rhc_problem, iteration)
        fit.train()
        end = time.clock()
        full_time = end - start
        rhc_total += fitness.value(rhc_problem.getOptimal())
        rhc_time += full_time
Example #17
0
# NOTE(review): fragment — Jython-style imports of ABAGAIL Java classes;
# several names used below (DiscreteUniformDistribution, etc.) are
# imported outside this snippet, and the SA run after the last line is
# truncated.
import opt.prob.MIMIC as MIMIC
import opt.prob.ProbabilisticOptimizationProblem as ProbabilisticOptimizationProblem
import shared.FixedIterationTrainer as FixedIterationTrainer
import opt.example.ContinuousPeaksEvaluationFunction as ContinuousPeaksEvaluationFunction
from array import array
"""
Commandline parameter(s):
   none
"""

# 60-bit continuous peaks problem with threshold T = N / 10.
N = 60
T = N / 10
fill = [2] * N
ranges = array('i', fill)

ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# RHC baseline: 200000 fixed iterations (Python 2 print statement).
rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200000)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

sa = SimulatedAnnealing(1E11, .95, hcp)
Commandline parameter(s):
   none
"""


# NOTE(review): fragment — the three lines above are the tail of a
# docstring opened outside this snippet; VoteTest comes from outside it
# too, and the SA run after the last line is truncated.
print "About to run Vote Test. Yay."

vt = VoteTest()
vt.doIt()

# 60-bit continuous peaks problem with threshold T = N / 10.
N=60
T=N/10
fill = [2] * N
ranges = array('i', fill)

ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# RHC baseline: 200000 fixed iterations (Python 2 print statement).
rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200000)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

sa = SimulatedAnnealing(1E11, .95, hcp)