Example #1
0
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        print st
        with open(fname, 'a') as f:
            f.write(st)

# SA
for t in range(numTrials):
    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
        fname = outfile.format('SA{}'.format(CE), str(t + 1))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ef = TwoColorsEvaluationFunction()
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
        hcp = GenericHillClimbingProblem(ef, odd, nf)
        sa = SimulatedAnnealing(1E10, CE, hcp)
        fit = FixedIterationTrainer(sa, 10)
        times = [0]
        for i in range(0, maxIters, 10):
            start = clock()
            fit.train()
            elapsed = time.clock() - start
            times.append(times[-1] + elapsed)
            fevals = ef.fevals
            score = ef.value(sa.getOptimal())
            ef.fevals -= 1
            st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
            print st
            with open(fname, 'a') as f:
                f.write(st)
# Knapsack problem: build the shared evaluation function and the three
# problem wrappers (hill-climbing, genetic-algorithm, probabilistic), then
# run RHC, SA, GA and MIMIC once each and print the best fitness found.
# NOTE: `ef` and `hcp` bound here are reused by the SA sweep further below.
ef = KnapsackEvaluationFunction(weights, volumes, KNAPSACK_VOLUME, copies)
odd = DiscreteUniformDistribution(ranges)   # uniform initial-state sampler
nf = DiscreteChangeOneNeighbor(ranges)      # neighbor: change one component
mf = DiscreteChangeOneMutation(ranges)      # GA mutation operator
cf = UniformCrossOver()                     # GA crossover operator
df = DiscreteDependencyTree(.1, ranges)     # MIMIC distribution estimator
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

# Randomized hill climbing: 200k iterations.
rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200000)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

# Simulated annealing: start temperature 100, cooling exponent 0.95.
sa = SimulatedAnnealing(100, .95, hcp)
fit = FixedIterationTrainer(sa, 200000)
fit.train()
print "SA: " + str(ef.value(sa.getOptimal()))

# Genetic algorithm: population 200, 150 mated, 25 mutated per generation.
ga = StandardGeneticAlgorithm(200, 150, 25, gap)
fit = FixedIterationTrainer(ga, 1000)
fit.train()
print "GA: " + str(ef.value(ga.getOptimal()))

# MIMIC: 200 samples per iteration, keep the best 100.
mimic = MIMIC(200, 100, pop)
fit = FixedIterationTrainer(mimic, 1000)
fit.train()
print "MIMIC: " + str(ef.value(mimic.getOptimal()))
Example #3
0
        #         print("Early stopping at iteration {}".format(i))
        #         break
        #     else:
        #         early_stop.pop(0)

# SA
for t in range(numTrials):
    for T in [1e1, 1e3, 1e5, 1e7, 1e9, 1e11, 1e13]:
        for CE in [0.20, 0.40, 0.60, 0.80, 0.90, 0.99]:
            fname = outfile.replace(
                '@ALG@',
                'SA_CE{}_T{}'.format(int(CE * 100),
                                     int(T))).replace('@N@', str(t + 1))
            with open(fname, 'w') as f:
                f.write('iterations,fitness,time\n')
            sa = SimulatedAnnealing(T, CE, hcp)
            fit = FixedIterationTrainer(sa, 10)
            times = [0]

            early_stop = []
            for i in range(0, maxIters, 10):
                start = clock()
                fit.train()
                elapsed = time.clock() - start
                times.append(times[-1] + elapsed)
                score = ef.value(sa.getOptimal())
                st = '{},{},{}\n'.format(i, score, times[-1])
                print st
                with open(fname, 'a') as f:
                    f.write(st)