Example #1
# (truncated above: N and samples are defined there, and a try block that reads
#  tokeep from the command line opens here; the argv index below is a guess)
try:
    tokeep = int(sys.argv[3])
except:
    tokeep = 10
runs = 10

#sys.stdout = open("countones_MIMIC-%d-%d-%d.txt" % (N, samples, tokeep), "w")
sys.stdout = open("countones.csv", "a")
"""
Commandline parameter(s):
   N : number in the test vector
   runs : number of runs to average over
"""

fill = [2] * N
ranges = array('i', fill)

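# The same setup recurs throughout these examples: ef is the CountOnes fitness
# function, odd is the uniform distribution from which initial bit vectors are
# drawn, nf/mf/cf are the neighbor, mutation and crossover operators used by
# RHC/SA and the GA, df is the dependency tree MIMIC fits, and hcp/gap/pop wrap
# them into hill-climbing, genetic-algorithm and probabilistic problem objects.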
ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

t0 = time.time()
calls = []
results = []
for _ in range(runs):
    mimic = MIMIC(samples, tokeep, pop)      # samples drawn per iteration, tokeep retained to refit the dependency tree
    fit = FixedIterationTrainer(mimic, 100)  # run MIMIC for 100 iterations
Example #2
File: countones.py  Project: PC98/ABAGAIL
"""
Commandline parameter(s):
   none
"""

iterations_data = {}
iterations_file = "count_ones_data.pickle"

if os.path.isfile(iterations_file):
    stdout.write("\nCount Ones Data found.\n")
    exit(0)

N = 400
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

x = xrange(50, 550, 50)
optimal_value = {'RHC': [], 'SA': [], 'GA': [], 'MIMIC': []}


for item in x:
    stdout.write("\nRunning Count Ones with %d iterations...\n" % item)
Example #3
from array import array



"""
Commandline parameter(s):
   none
"""

N = 1000
T = N / 4
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
initial_distribution = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mutation_function = DiscreteChangeOneMutation(ranges)

cf = SingleCrossOver()


df = DiscreteDependencyTree(.1, ranges)
hill_climbing_problem = GenericHillClimbingProblem(ef, initial_distribution, nf)
genetic_problem = GenericGeneticAlgorithmProblem(ef, initial_distribution, mutation_function, cf)
probablistic_optimization = GenericProbabilisticOptimizationProblem(ef, initial_distribution, df)

from time import time
f = open("experiments/results/countones_optimal_1000.txt", "w")
Example #4
# ABAGAIL imports for every class used below (the original listing is truncated
# above this point, so the rest of the import block is filled in here)
import dist.DiscreteDependencyTree as DiscreteDependencyTree
import dist.DiscreteUniformDistribution as DiscreteUniformDistribution
import opt.DiscreteChangeOneNeighbor as DiscreteChangeOneNeighbor
import opt.GenericHillClimbingProblem as GenericHillClimbingProblem
import opt.RandomizedHillClimbing as RandomizedHillClimbing
import opt.SimulatedAnnealing as SimulatedAnnealing
import opt.ga.DiscreteChangeOneMutation as DiscreteChangeOneMutation
import opt.ga.GenericGeneticAlgorithmProblem as GenericGeneticAlgorithmProblem
import opt.ga.SingleCrossOver as SingleCrossOver
import opt.prob.GenericProbabilisticOptimizationProblem as GenericProbabilisticOptimizationProblem
import opt.prob.MIMIC as MIMIC
import opt.prob.ProbabilisticOptimizationProblem as ProbabilisticOptimizationProblem
import shared.FixedIterationTrainer as FixedIterationTrainer
import opt.example.CountOnesEvaluationFunction as CountOnesEvaluationFunction
from array import array
"""
Commandline parameter(s):
   none
"""

N = 80
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200)   # train RHC for a fixed 200 iterations
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))   # fitness of the best state found

sa = SimulatedAnnealing(100, .95, hcp)   # starting temperature 100, cooling rate .95
Example #5
try:
    N = int(sys.argv[1])     # reconstructed: the original opening of this snippet is cut off
except:
    N = 200                  # assumed default
try:
    runs = int(sys.argv[2])
except:
    runs = 20

sys.stdout = open("countones-%d.txt" % N, "w")

"""
Commandline parameter(s):
   N : number in the test vector
   runs : number of runs to average over
"""

fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)


iters = 5000

t0 = time.time()
calls = []
results = []
Example #6
def run_algorithm_test(ranges, algorithms, output_file_name, trial_number, iterations=False):

    with open(output_file_name,'w') as f:
        f.write('algorithm,optimal_result,iterations,time,trial\n')

    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)

    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    for trial in range(trial_number):
        if iterations is False:
            for item in algorithms:
                start_time = time.time()
                if item in ['rhc']:
                    optimal_result, run_iters = run_rhc(hcp, ef)
                elif item in ['sa']:
                    optimal_result, run_iters = run_sa(hcp, ef)
                elif item in ['ga']:
                    optimal_result, run_iters = run_ga(gap, ef)
                elif item in ['mimic']:
                    optimal_result, run_iters = run_mimic(pop, ef)
                else:
                    print "The algorithm type {} is not supported.".format(item)
                    continue

                end_time = time.time()
                time_elapsed = end_time - start_time

                run_output = '{},{},{},{},{}\n'.format(item, optimal_result, run_iters, time_elapsed, trial)
                with open(output_file_name,'a') as f:
                    f.write(run_output)
        else:
            for iter in iterations:
                for item in algorithms:
                    start_time = time.time()
                    if item in ['rhc']:
                        optimal_result, run_iters = run_rhc(hcp, ef, iter)
                    elif item in ['sa']:
                        optimal_result, run_iters = run_sa(hcp, ef, iter)
                    elif item in ['ga']:
                        optimal_result, run_iters = run_ga(gap, ef, iter)
                    elif item in ['mimic']:
                        optimal_result, run_iters = run_mimic(pop, ef, iter)
                    else:
                        print "The algorithm type {} is not supported.".format(item)
                        continue

                    end_time = time.time()
                    time_elapsed = end_time - start_time

                    run_output = '{},{},{},{},{}\n'.format(item, optimal_result, run_iters, time_elapsed, trial)

                    with open(output_file_name,'a') as f:
                        f.write(run_output)

    print "time elapsed is {}".format(time_elapsed)

    return
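
# A hypothetical call, assuming the run_rhc/run_sa/run_ga/run_mimic helpers are
# defined elsewhere in the same script:
#
#   ranges = array('i', [2] * 80)
#   run_algorithm_test(ranges, ['rhc', 'sa', 'ga', 'mimic'],
#                      'count_ones_trials.csv', trial_number=10,
#                      iterations=[100, 500, 1000])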
Example #7
import opt.example.CountOnesEvaluationFunction as CountOnesEvaluationFunction
from array import array




"""
Commandline parameter(s):
   none
"""

N = 80
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

sa = SimulatedAnnealing(100, .95, hcp)
Example #8
"""

N = 100
fill = [2] * N
ranges = array('i', fill)
"""
Q: I am trying to understand what the count ones optimization function is doing.
From the test class in ABAGAIL, I see that an array is created and filled with all 2s.
I understand the idea of counting the 1s in the vector, but I do not see how an
array of all 2s turns into an array of 1s and 0s.

A: If you dig through the classes that use the ranges variable, it becomes clear:
each 2 specifies how many different values are possible at that position of the
vector (namely 0 and 1); it is not the value stored there.
"""

ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

rhc = RandomizedHillClimbing(hcp)
print "Random Hill Climb"
iters_list = [10, 25, 50, 100, 200, 300, 400, 500]
for iters in iters_list:
    fit = FixedIterationTrainer(rhc, iters)
    start = time.time()
Example #9
BASE_PATH = os.getcwd() + "/"
OUTFILE_BASE = BASE_PATH + 'out/countones/'

if not os.path.exists(OUTFILE_BASE):
    os.makedirs(OUTFILE_BASE)

for algo in ["RHC", "MIMIC", "GA", "SA"]:
    with open(OUTFILE_BASE + algo + ".csv", 'w') as f:
        f.write("iterations,training_time,fitness\n")

for N in xrange(10, 1000):
    fill = [2] * N
    ranges = array('i', fill)

    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, N)
    start = time.time()
    fit.train()
    end = time.time()
    training_time = end - start
Example #10
import opt.prob.MIMIC as MIMIC
import opt.prob.ProbabilisticOptimizationProblem as ProbabilisticOptimizationProblem
import shared.FixedIterationTrainer as FixedIterationTrainer
import opt.example.CountOnesEvaluationFunction as CountOnesEvaluationFunction
from array import array
import shared.ConvergenceTrainer as ConvergenceTrainer
"""
Commandline parameter(s):
   none
"""

N = 80
fill = [2] * N
ranges = array('i', fill)

ef = CountOnesEvaluationFunction()

odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

times = ""
print "RHC:"
for x in range(20):
    start = time.time()
    iterations = (x + 1) * 250