def run_mimic(t, samples, keep, m):
    """Run one MIMIC trial on the Count Ones problem and log per-iteration stats.

    t       -- trial index (0-based; written to the filename as t+1)
    samples -- number of samples MIMIC draws per iteration
    keep    -- number of samples kept per iteration
    m       -- dependency-tree parameter passed to DiscreteDependencyTree

    Writes rows of 'iteration,score,cumulative_time,fevals' via base.write_to_file.
    Relies on module globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('MIMIC{}_{}_{}'.format(samples, keep, m), str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    df = DiscreteDependencyTree(m, ranges)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    mimic = MIMIC(samples, keep, pop)
    fit = FixedIterationTrainer(mimic, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Use time.clock() on both sides of the measurement (the original
        # mixed bare clock() with time.clock()).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(mimic.getOptimal())
        ef.fevals -= 1  # the ef.value() call above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
def makeProblem(num):
    """Populate the module-level Count Ones problem objects for a bit vector
    of length *num*.

    Sets N, fill, ranges and all ABAGAIL problem/operator globals, then
    derives GA_keep and GA_mut from GA_pop.
    NOTE: GA_pop must already have been assigned by the caller.
    """
    global N, fill, ranges, ef, odd, nf, mf, cf, df, hcp, gap, pop
    global GA_pop, myWriter, GA_iters, GA_mut, GA_keep
    N = num
    fill = [2] * N
    ranges = array('i', fill)
    # Evaluation function, distributions and variation operators.
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    # Problem wrappers for hill climbing, GA and MIMIC.
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    # Derived GA hyper-parameters.
    GA_keep = int(GA_pop * .75)
    GA_mut = int(GA_pop * .15)
def countones(): N = 80 fill = [2] * N ranges = array('i', fill) ef = CountOnesEvaluationFunction() odd = DiscreteUniformDistribution(ranges) nf = DiscreteChangeOneNeighbor(ranges) mf = DiscreteChangeOneMutation(ranges) cf = SingleCrossOver() rhc_generic("COrhc50", ef, odd, nf, 1.0, 10000, 10, 5) sa_generic("COsa50", ef, odd, nf, 1.0, 10000, 10, 5, ([1E12, 1E6], [0.999, 0.99, 0.95])) ga_generic("COga50", ef, odd, mf, cf, 50.0, 10000, 10, 1, ([2000, 200], [0.5, 0.25], [0.25, 0.1, 0.02])) mimic_discrete("COmimic50", ef, odd, ranges, 300.0, 10000, 10, 1, ([200], [100], [0.1, 0.5, 0.9])) print "CO all done"
def run_sa(t, CE):
    """Run one Simulated Annealing trial on Count Ones and log per-iteration stats.

    t  -- trial index (0-based; written to the filename as t+1)
    CE -- cooling exponent passed to SimulatedAnnealing (start temp 1E10)

    Writes rows of 'iteration,score,cumulative_time,fevals' via base.write_to_file.
    Relies on module globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('SA{}'.format(CE), str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    sa = SimulatedAnnealing(1E10, CE, hcp)
    fit = FixedIterationTrainer(sa, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Use time.clock() on both sides of the measurement (the original
        # mixed bare clock() with time.clock()).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(sa.getOptimal())
        ef.fevals -= 1  # the ef.value() call above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
def run_rhc(t):
    """Run one Randomized Hill Climbing trial on Count Ones and log per-iteration stats.

    t -- trial index (0-based; written to the filename as t+1)

    Writes rows of 'iteration,score,cumulative_time,fevals' via base.write_to_file.
    Relies on module globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('RHC', str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    rhc = RandomizedHillClimbing(hcp)
    fit = FixedIterationTrainer(rhc, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Use time.clock() on both sides of the measurement (the original
        # mixed bare clock() with time.clock()).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(rhc.getOptimal())
        ef.fevals -= 1  # the ef.value() call above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
def run_ga(t, pop, mate, mutate):
    """Run one Genetic Algorithm trial on Count Ones and log per-iteration stats.

    t      -- trial index (0-based; written to the filename as t+1)
    pop    -- GA population size
    mate   -- number of individuals mated per generation
    mutate -- number of individuals mutated per generation

    Writes rows of 'iteration,score,cumulative_time,fevals' via base.write_to_file.
    Relies on module globals: outfile, base, ranges, maxIters.
    """
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    base.write_header(fname)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        # Use time.clock() on both sides of the measurement (the original
        # mixed bare clock() with time.clock()).
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
        ef.fevals -= 1  # the ef.value() call above counted one extra evaluation
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        base.write_to_file(fname, st)
    return
# NOTE(review): this `else:` continues a conditional whose `if` branch is not
# in this chunk; `sf` is presumably an alternative crossover operator chosen
# there — confirm against the full file.
else: cf = sf
#sys.stdout = open("countones_ga_%d-%d-%d-%d-%d.txt" % (N, ga_pop,co_type,ga_keep,ga_mut), "w")
# Redirect all prints so results accumulate in one shared CSV.
sys.stdout = open("countones.csv", "a")
runs = 10
"""
Commandline parameter(s):
    N : number in the test vector
    runs : number of runs to average over
"""
fill = [2] * N
ranges = array('i', fill)
ef = CountOnesEvaluationFunction()
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
# NOTE(review): this unconditionally overwrites the cf selected by the
# conditional above — verify that is intended.
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
t0 = time.time()
calls = []
results = []
# Average GA performance over `runs` independent runs; the loop body may
# continue beyond this chunk.
for _ in range(runs):
    ga = StandardGeneticAlgorithm(ga_pop, ga_keep, ga_mut, gap)
    fit = FixedIterationTrainer(ga, 150)
def run_count_ones_experiments():
    """Grid-search RHC, SA, GA and MIMIC on the Count Ones problem (N=80).

    For each algorithm, sweeps its hyper-parameters and iteration budgets and
    writes one CSV log per algorithm under ./output. Each optimizer is built
    fresh per (parameters, iteration-budget) cell, so rows are independent
    from-scratch runs rather than a single continued run.
    """
    OUTPUT_DIRECTORY = './output'
    N = 80
    fill = [2] * N
    ranges = array('i', fill)
    ef = CountOnesEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    max_iter = 5000
    outfile = OUTPUT_DIRECTORY + '/count_ones_{}_log.csv'

    # Randomized Hill Climbing.
    # Keep the log file open for the whole sweep instead of reopening it in
    # append mode for every row (same output, far fewer open() calls).
    filename = outfile.format('rhc')
    with open(filename, 'w') as f:
        f.write('iteration,fitness,time\n')
        for it in range(0, max_iter, 10):
            rhc = RandomizedHillClimbing(hcp)
            fit = FixedIterationTrainer(rhc, it)
            start_time = time.clock()
            fit.train()
            elapsed_time = time.clock() - start_time
            score = ef.value(rhc.getOptimal())
            data = '{},{},{}\n'.format(it, score, elapsed_time)
            print(data)
            f.write(data)

    # Simulated Annealing: sweep the cooling rate.
    filename = outfile.format('sa')
    with open(filename, 'w') as f:
        f.write('iteration,cooling_value,fitness,time\n')
        for cooling_value in (.19, .38, .76, .95):
            for it in range(0, max_iter, 10):
                sa = SimulatedAnnealing(100, cooling_value, hcp)
                fit = FixedIterationTrainer(sa, it)
                start_time = time.clock()
                fit.train()
                elapsed_time = time.clock() - start_time
                score = ef.value(sa.getOptimal())
                data = '{},{},{},{}\n'.format(it, cooling_value, score, elapsed_time)
                print(data)
                f.write(data)

    # Genetic Algorithm: sweep mate/mutate counts at fixed population size.
    filename = outfile.format('ga')
    with open(filename, 'w') as f:
        f.write('iteration,population_size,to_mate,to_mutate,fitness,time\n')
        for population_size, to_mate, to_mutate in itertools.product([20], [4, 8, 16, 20], [0, 2, 4, 6]):
            for it in range(0, max_iter, 10):
                ga = StandardGeneticAlgorithm(population_size, to_mate, to_mutate, gap)
                fit = FixedIterationTrainer(ga, it)
                start_time = time.clock()
                fit.train()
                elapsed_time = time.clock() - start_time
                score = ef.value(ga.getOptimal())
                data = '{},{},{},{},{},{}\n'.format(it, population_size, to_mate, to_mutate, score, elapsed_time)
                print(data)
                f.write(data)

    # MIMIC: sweep the dependency-tree parameter m (smaller iteration cap —
    # MIMIC iterations are much more expensive).
    filename = outfile.format('mm')
    with open(filename, 'w') as f:
        f.write('iterations,samples,to_keep,m,fitness,time\n')
        for samples, to_keep, m in itertools.product([50], [10], [0.1, 0.3, 0.5, 0.7, 0.9]):
            for it in range(0, 500, 10):
                df = DiscreteDependencyTree(m, ranges)
                pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
                # BUG FIX: was MIMIC(samples, 20, pop) — the hard-coded 20
                # ignored to_keep, so the CSV misreported the experiment.
                mm = MIMIC(samples, to_keep, pop)
                fit = FixedIterationTrainer(mm, it)
                start_time = time.clock()
                fit.train()
                elapsed_time = time.clock() - start_time
                score = ef.value(mm.getOptimal())
                data = '{},{},{},{},{},{}\n'.format(it, samples, to_keep, m, score, elapsed_time)
                print(data)
                f.write(data)
def run_algorithm_test(ranges, algorithms, output_file_name, trial_number, iterations=False): with open(output_file_name,'w') as f: f.write('algorithm,optimal_result,iterations,time,trial\n') ef = CountOnesEvaluationFunction() odd = DiscreteUniformDistribution(ranges) nf = DiscreteChangeOneNeighbor(ranges) mf = DiscreteChangeOneMutation(ranges) cf = SingleCrossOver() df = DiscreteDependencyTree(.1, ranges) hcp = GenericHillClimbingProblem(ef, odd, nf) gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf) pop = GenericProbabilisticOptimizationProblem(ef, odd, df) for trial in range(trial_number): if iterations is False: for item in algorithms: start_time = time.time() if item in ['rhc']: optimal_result, run_iters = run_rhc(hcp, ef) elif item in ['sa']: optimal_result, run_iters = run_sa(hcp, ef) elif item in ['ga']: optimal_result, run_iters = run_ga(gap, ef) elif item in ['mimic']: optimal_result, run_iters = run_mimic(pop, ef) else: print "The algorithm type {} is not supported.".format(item) end_time = time.time() time_elapsed = end_time - start_time run_output = '{},{},{},{},{}\n'.format(item, optimal_result, run_iters, time_elapsed, trial) with open(output_file_name,'a') as f: f.write(run_output) else: for iter in iterations: for item in algorithms: start_time = time.time() if item in ['rhc']: optimal_result, run_iters = run_rhc(hcp, ef, iter) elif item in ['sa']: optimal_result, run_iters = run_sa(hcp, ef, iter) elif item in ['ga']: optimal_result, run_iters = run_ga(gap, ef, iter) elif item in ['mimic']: optimal_result, run_iters = run_mimic(pop, ef, iter) else: print "The algorithm type {} is not supported.".format(item) end_time = time.time() time_elapsed = end_time - start_time run_output = '{},{},{},{},{}\n'.format(item, optimal_result, run_iters, time_elapsed, trial) with open(output_file_name,'a') as f: f.write(run_output) print "time elapsed is {}".format(time_elapsed) return