Example #1
def run_ga(t, pop, mate, mutate):
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    with open(fname, 'a+') as f:
        f.seek(0)  # 'a+' opens positioned at end-of-file; rewind before reading
        content = f.read()
        if "fitness" not in content:
            f.write('iterations,fitness,time,fevals\n')
    ef = FlipFlopEvaluationFunction()
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
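        # ef.value() on the incumbent bumps the fevals counter, so back out that bookkeeping call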
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        # print st
        base.write_to_file(fname, st)
    return
Example #2
def run_ga(gap, ef, iterations=1000):

    ga = StandardGeneticAlgorithm(200, 100, 10, gap)
    fit = FixedIterationTrainer(ga, iterations)
    fit.train()
    optimal_result = str(ef.value(ga.getOptimal()))
    print "GA: " + optimal_result

    return optimal_result, iterations
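None of these snippets show their imports. A minimal, self-contained sketch of the same StandardGeneticAlgorithm(populationSize, toMate, toMutate, problem) pattern, assuming Jython with the stock ABAGAIL jar on the path (jar location and problem size are illustrative):

import sys
sys.path.append('./ABAGAIL.jar')  # assumed jar location

from array import array
import dist.DiscreteUniformDistribution as DiscreteUniformDistribution
import opt.example.FlipFlopEvaluationFunction as FlipFlopEvaluationFunction
import opt.ga.StandardGeneticAlgorithm as StandardGeneticAlgorithm
import opt.ga.GenericGeneticAlgorithmProblem as GenericGeneticAlgorithmProblem
import opt.ga.SingleCrossOver as SingleCrossOver
import opt.ga.DiscreteChangeOneMutation as DiscreteChangeOneMutation
import shared.FixedIterationTrainer as FixedIterationTrainer

N = 80
ranges = array('i', [2] * N)          # each position takes values in {0, 1}
ef = FlipFlopEvaluationFunction()     # fitness = number of alternations
odd = DiscreteUniformDistribution(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)

# population 200, 100 mated and 10 mutated per generation, as in Example #2
ga = StandardGeneticAlgorithm(200, 100, 10, gap)
FixedIterationTrainer(ga, 1000).train()
print "GA: " + str(ef.value(ga.getOptimal()))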
Example #3
    def run_experiment(self, opName):
        """Run a genetic algorithms optimization experiment for a given
        optimization problem.

        Args:
            opName (str): Name of the optimization problem.

        """
        outdir = 'results/OPT/{}'.format(opName)  # get results directory
        outfile = 'GA_{}_{}_{}_results.csv'.format(self.p, self.ma, self.mu)
        fname = get_abspath(outfile, outdir)  # get output filename

        # delete existing results file, if it already exists
        try:
            os.remove(fname)
        except OSError as e:
            print e

        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals,trial\n')

        # start experiment
        for t in range(self.numTrials):
            # initialize optimization problem and training functions
            ranges, ef = self.op.get_ef()
            mf = None
            cf = None
            if opName == 'TSP':
                mf = SwapMutation()
                cf = TravelingSalesmanCrossOver(ef)
            else:
                mf = DiscreteChangeOneMutation(ranges)
                cf = SingleCrossOver()
            odd = DiscreteUniformDistribution(ranges)
            gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
            ga = StandardGeneticAlgorithm(self.p, self.ma, self.mu, gap)
            fit = FixedIterationTrainer(ga, 10)

            # run experiment and train evaluation function
            start = time.clock()
            for i in range(0, self.maxIters, 10):
                fit.train()
                elapsed = time.clock() - start
                fe = ef.valueCallCount
                score = ef.value(ga.getOptimal())
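                # ef.value() on the incumbent bumps valueCallCount; back out that bookkeeping call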
                ef.valueCallCount -= 1

                # write results to output file
                s = '{},{},{},{},{}\n'.format(i + 10, score, elapsed, fe, t)
                with open(fname, 'a+') as f:
                    f.write(s)
Example #4
def GA():
    correctCount = 0
    t = 0
    totalTime = 0
    #GA_iters=1
    attempts = 0
    threshold = .1
    iters = 0
    NUM_ITERS = 10
    global ga, GA_keep, GA_mut
    ga = StandardGeneticAlgorithm(int(GA_pop), GA_keep, GA_mut, gap)
    while correctCount < 1 and attempts <= 50000:
        attempts += 1
        start = time.time()
        fit = ConvergenceTrainer(ga, threshold, NUM_ITERS)  # alternative: FixedIterationTrainer(ga, int(GA_iters))
        fitness = fit.train()
        t = time.time() - start
        totalTime += t
        iters += fit.getIterations()
        myWriter.addValue(fitness, "GA_fitness", runNum)
        myWriter.addValue(t, "GA_searchTimes", runNum)

        v = ef.value(ga.getOptimal())
        if v == N:
            correctCount += 1
            #print "GA correct with v  " + str(v) +" correctCount = "+ str (correctCount)
        else:
            if fit.getIterations() < NUM_ITERS:
                # converged before the iteration cap but still got it wrong,
                # so the convergence threshold was too loose
                threshold /= 10
            correctCount = 0
            #GA_pop += N #5*N#*=1.2
            #GA_iters *= 1.5
        # GA_mut = int(GA_pop * .25)
        #GA_keep = int(GA_pop * .80)
    myWriter.addValue(totalTime, "GA_times", 0)
    myWriter.addValue(iters, "GA_iters", 0)
    myWriter.addValue(int(GA_pop), "GA_pop", 0)
    myWriter.addValue(int(GA_mut), "GA_mut", 0)
    myWriter.addValue(int(GA_keep), "GA_keep", 0)
    myWriter.addValue(threshold, "GA_threshold", 0)
    print(
        str(N) + ": GA: " + str(ef.value(ga.getOptimal())) + " took " +
        str(totalTime) + " seconds and " + str(iters) + " iters w/ pop " +
        str(GA_pop) + " mut " + str(GA_mut) + " keep " + str(GA_keep) +
        " for fitness " + str(fitness) + " in " + str(attempts) +
        " attempts " + " thresh " + str(threshold))
Example #5
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances(TRAIN_DATA_FILE)
    testing_ints = initialize_instances(TEST_DATA_FILE)
    validation_ints = initialize_instances(VALIDATE_DATA_FILE)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    relu = RELU()
    # 50 and 0.000001 are the defaults from RPROPUpdateRule.java
    rule = RPROPUpdateRule(0.064, 50, 0.000001)
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    with open(OUTFILE.format(oa_name), 'w') as f:
        f.write('{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n'.format(
            'iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg', 'acc_val',
            'acc_tst', 'f1_trg', 'f1_val', 'f1_tst', 'bal_trg', 'bal_val',
            'bal_tst', 'elapsed'))

    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, OUTPUT_LAYER], relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, validation_ints,
          testing_ints, measure, TRAINING_ITERATIONS, OUTFILE.format(oa_name))
Example #6
def main(P, mate, mutate):
    #training_ints = initialize_instances('bCancer_trg.csv')
    #testing_ints = initialize_instances('bCancer_test.csv')
    #validation_ints = initialize_instances('bCancer_val.csv')

    training_ints = initialize_instances('winequality_trg.csv')
    testing_ints = initialize_instances('winequality_test.csv')
    validation_ints = initialize_instances('winequality_val.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    hts = HyperbolicTangentSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg',
                                                   'MSE_val', 'MSE_tst',
                                                   'acc_trg', 'acc_val',
                                                   'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], hts)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, validation_ints,
          testing_ints, measure)
Example #7
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_trg.csv')
    testing_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_test.csv')
    validation_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_val.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    sig = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "GA_%s_%s_%s" % (P, mate, mutate)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('%s,%s,%s,%s,%s,%s,%s,%s\n' %
                ('iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg',
                 'acc_val', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3, OUTPUT_LAYER
    ], sig)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    for trial in xrange(TRIALS):
        oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
        train(oa, classification_network, oa_name, training_ints,
              validation_ints, testing_ints, measure)
Example #8
def ga_generic(name, ef, odd, mf, cf, iter_time, iters_total, iters_step, n_trials, params):
    for i_trial in range(n_trials):
        for popsize, toMate, toMutate in itertools.product(*params):
            gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
            ga_instance = StandardGeneticAlgorithm(popsize, int(popsize * toMate), int(popsize * toMutate), gap)
            ga_trainer = FixedIterationTrainer(ga_instance, iters_step)
            ga_state = {'problem': ga_instance,
                        'trainer': ga_trainer}
            wrapper_ga = AlgoWrapper(ga_state,
                                     lambda state: state['trainer'].train(),
                                     lambda state: ef.value(state['problem'].getOptimal()),
                                     lambda state: ef.value(state['problem'].getOptimal())
                                     )
            # create name and invalidate if super empty
            decorated_name = ""
            if name is not None and name != "":
                decorated_name = name + "_popSize_" + str(popsize) + "_toMate_" + str(toMate) + "_toMutate_" + str(toMutate)
            timed_trainer = TimedTrainer(decorated_name,
                                         wrapper_ga,
                                         iter_time,
                                         iters_total,
                                         iters_step,
                                         _param_dict={'name':name,
                                                      'popSize':popsize,
                                                      'toMate':toMate,
                                                      'toMutate':toMutate}
                                         )
            timed_trainer.run()
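AlgoWrapper and TimedTrainer are helpers local to this project and are not shown in the listing. A hypothetical reconstruction of AlgoWrapper, inferred only from the call sites above (names and method spelling assumed):

class AlgoWrapper(object):
    # hypothetical reconstruction: holds a state dict plus three callbacks
    def __init__(self, state, train_fn, train_score_fn, test_score_fn):
        self.state = state
        self.train_fn = train_fn
        self.train_score_fn = train_score_fn
        self.test_score_fn = test_score_fn

    def train(self):
        return self.train_fn(self.state)

    def train_score(self):
        return self.train_score_fn(self.state)

    def test_score(self):
        return self.test_score_fn(self.state)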
Example #9
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances('./../data/x_train_val.csv')
    testing_ints = initialize_instances('./../data/x_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = HyperbolicTangentSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    FILE = OUTFILE.replace('XXX', oa_name)
    with open(FILE, 'w') as f:
        f.write('{},{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg',
                                                      'MSE_tst', 'acc_trg',
                                                      'acc_tst', 'f1_trg',
                                                      'f1_tst', 'train_time',
                                                      'pred_time'))
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3,
        HIDDEN_LAYER4, OUTPUT_LAYER
    ], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure, TRAINING_ITERATIONS, FILE)
Example #10
def main(P, mate, mutate, layers, training_iterations, test_data_file, train_data_file, validate_data_file):
    """Run this experiment"""
    training_ints = base.initialize_instances(train_data_file)
    testing_ints = base.initialize_instances(test_data_file)
    validation_ints = base.initialize_instances(validate_data_file)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    # relu = RELU()
    relu = LogisticSigmoid()
    # 50 and 0.000001 are the defaults from RPROPUpdateRule.java
    rule = RPROPUpdateRule(0.064, 50, 0.000001)
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    with open(OUTFILE.format(oa_name), 'w') as f:
        f.write('{},{},{},{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg',
                                                            'acc_val', 'acc_tst', 'f1_trg', 'f1_val', 'f1_tst',
                                                            'elapsed'))
    classification_network = factory.createClassificationNetwork(
        layers, relu)
    nnop = NeuralNetworkOptimizationProblem(
        data_set, classification_network, measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    base.train(oa, classification_network, oa_name, training_ints, validation_ints, testing_ints, measure,
               training_iterations, OUTFILE.format(oa_name))
    return
Example #11
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances('s_trg.csv')
    testing_ints = initialize_instances('s_test.csv')
    validation_ints = initialize_instances('s_val.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    relu = RELU()
    rule = RPROPUpdateRule()
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg',
                                                   'MSE_val', 'MSE_tst',
                                                   'acc_trg', 'acc_val',
                                                   'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3, OUTPUT_LAYER
    ], relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    for trial in xrange(TRIALS):
        oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
        train(oa, classification_network, oa_name, training_ints,
              validation_ints, testing_ints, measure)
Example #12
def ga_network(name, network, measure, train_set, test_set, acc_func, iter_time, iters_total, iters_step, n_trials, params):
    for i_trial in range(n_trials):
        for popsize, toMate, toMutate in itertools.product(*params):
            network_optimizer = NeuralNetworkOptimizationProblem(train_set, network, measure)
            ga_instance = StandardGeneticAlgorithm(popsize, int(popsize * toMate), int(popsize * toMutate), network_optimizer)
            ga_trainer = FixedIterationTrainer(ga_instance, iters_step)
            nn_state = {'network': network,
                        'trainer': ga_trainer}
            wrapper_ga = AlgoWrapper(nn_state,
                                     lambda state: state['trainer'].train(),
                                     lambda state: acc_func(train_set, state['network'], measure),
                                     lambda state: acc_func(test_set, state['network'], measure)
                                     )
            # create name and invalidate if super empty
            decorated_name = ""
            if name is not None and name != "":
                decorated_name = name + "_popSize_" + str(popsize) + "_toMate_" + str(toMate) + "_toMutate_" + str(toMutate)
            timed_trainer = TimedTrainer(decorated_name,
                                         wrapper_ga,
                                         iter_time,
                                         iters_total,
                                         iters_step,
                                         _param_dict={'name':name,
                                                      'popSize':popsize,
                                                      'toMate':toMate,
                                                      'toMutate':toMutate}
                                         )
            timed_trainer.run()
Example #13
def main(P, mate, mutate):
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{}\n'.format('iteration', 'MSE_train',
                                             'MSE_test', 'acc_train',
                                             'acc_tst', 'elapsed'))

    training_data = initialize_instances('../data/Pima-train.csv')
    testing_data = initialize_instances('../data/Pima-test.csv')
    print(len(training_data))
    #testing_ints = initialize_instances('m_test.csv')
    #validation_ints = initialize_instances('m_val.csv')

    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_data)
    relu = RELU()
    rule = RPROPUpdateRule()
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, OUTPUT_LAYER], relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_data, testing_data,
          measure)
Example #14
def ga():
    for popu, mate, mutate in product([50], [30, 20, 10], [20, 10]):
        fname = outfile.replace('XXX', 'GA{}_{}_{}'.format(popu, mate, mutate))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ga = StandardGeneticAlgorithm(popu, mate, mutate, gap)
        perform(ga, fname)
Example #15
def main(output_filename):
    """Run this experiment"""
    training_ints = initialize_instances('out_digits_train.csv')
    testing_ints = initialize_instances('out_digits_test.csv')
    validation_ints = initialize_instances('out_digits_test.csv')

    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)

    relu = RELU()
    rule = RPROPUpdateRule()

    with open(output_filename, 'w') as f:
        f.write('{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg', 'acc_val', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3, OUTPUT_LAYER], relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    # oa = SimulatedAnnealing(1E10, 0.95, nnop)
    # oa = RandomizedHillClimbing(nnop)

    P = 50
    mate = 20
    mutate = 20
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)

    train(oa, classification_network, output_filename, training_ints, validation_ints, testing_ints, measure)
Example #16
def ga_fac(args={}):
    constant_params = {'hcp': hcp}
    params = merge_two_dicts(args, constant_params)
    ga = StandardGeneticAlgorithm(
        args['populationSize'], int(args['populationSize'] * args['toMate']),
        int(args['populationSize'] * args['toMutate']), gap)
    gfit = FixedIterationTrainer(ga, num_iterations)
    return gfit
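ga_fac pulls gap and num_iterations from module scope, so only the GA hyperparameters travel in the args dict. A hypothetical call site (values illustrative):

# population 200, mate 50% and mutate 10% of the population each generation
gfit = ga_fac({'populationSize': 200, 'toMate': 0.5, 'toMutate': 0.1})
gfit.train()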
Example #17
def ga():
    for popu, mate, mutate in product([50, 60, 70, 80, 90], [40, 30, 20, 10], [30, 20, 10, 5]):
        fname = outfile.replace('XXX', 'GA{}_{}_{}'.format(popu, mate, mutate))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
        ga = StandardGeneticAlgorithm(popu, mate, mutate, gap)
        perform(ga, fname)
Example #18
def solveit(oaname, params):
    N = 60
    T = N / 10
    fill = [2] * N
    ranges = array('i', fill)
    iterations = 10000
    tryi = 1

    ef = ContinuousPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    #  fit = FixedIterationTrainer(rhc, 200000)
    #  fit.train()

    if oaname == 'RHC':
        iterations = int(params[0])
        tryi = int(params[1])
        oa = RandomizedHillClimbing(hcp)
    if oaname == 'SA':
        oa = SimulatedAnnealing(float(params[0]), float(params[1]), hcp)
    if oaname == 'GA':
        oa = StandardGeneticAlgorithm(int(params[0]), int(params[1]),
                                      int(params[2]), gap)
    if oaname == 'MMC':
        oa = MIMIC(int(params[0]), int(params[1]), pop)

    print "Running %s using %s for %d iterations, try %d" % (
        oaname, ','.join(params), iterations, tryi)
    print "=" * 20
    starttime = timeit.default_timer()
    output = []
    for i in range(iterations):
        oa.train()
        if i % 10 == 0:
            optimal = oa.getOptimal()
            score = ef.value(optimal)
            elapsed = float(timeit.default_timer() - starttime)
            output.append([str(i), str(score), str(elapsed)])

    print 'score: %.3f' % score
    print 'train time: %.3f secs' % (timeit.default_timer() - starttime)

    scsv = 'cp-%s-%s.csv' % (oaname, '-'.join(params))
    print "Saving to %s" % (scsv),
    with open(scsv, 'w') as csvf:
        writer = csv.writer(csvf)
        for row in output:
            writer.writerow(row)
    print "saved."
    print "=" * 20
Example #19
def main():
    training_ints = initialize_instances('data/bank_train.csv')
    testing_ints = initialize_instances('data/bank_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = LogisticSigmoid()
    rule = RPROPUpdateRule()
    ######################### back prop #####################

    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER],acti)
    train(BatchBackPropagationTrainer(data_set, classification_network, measure, rule),
          classification_network,
          'Backprop',
          training_ints, testing_ints, measure,
          './ANN/BP/BACKPROP_LOG.csv',
          2000)

    ######################### simulated annealing #################

    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
        for T in [1e8, 1e10, 1e12]:
            classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
            nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
            oFile = "./ANN/SA/%s_%s_LOG.csv" % (CE, T)
            train(SimulatedAnnealing(T, CE, nnop),
                  classification_network,
                  'simulated annealing',
                  training_ints, testing_ints, measure,
                  oFile,
                  2000)
    
    ######################### random hill climbing #################

    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER],acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    train(RandomizedHillClimbing(nnop), 
        classification_network, 
        'RHC', 
        training_ints, testing_ints, measure,
        './ANN/RHC/RHC_LOG.csv',
        2000)

    ######################### genetic algorithm #################
    
    for P in [100]:
        for mate in [5, 15, 30]:
            for mutate in [5,15,30]:
                classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER],acti)
                nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
                oFile = "./ANN/GA/%s_%s_%s_LOG.csv"%(P, mate, mutate)
                train(StandardGeneticAlgorithm(P, mate, mutate, nnop), 
                    classification_network, 
                    'GA', 
                    training_ints, testing_ints, measure,
                    oFile,
                    2000)
Example #20
def main():
    """Run algorithms on the abalone dataset."""
    instances = initialize_instances()
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(instances)

    networks = []  # BackPropagationNetwork
    nnop = []  # NeuralNetworkOptimizationProblem
    oa = []  # OptimizationAlgorithm
    oa_names = ["RHC", "SA", "GA"]
    results = ""

    for name in oa_names:
        classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER])
        networks.append(classification_network)
        nnop.append(NeuralNetworkOptimizationProblem(data_set, classification_network, measure))

    oa.append(RandomizedHillClimbing(nnop[0]))
    oa.append(SimulatedAnnealing(1E11, .95, nnop[1]))
    oa.append(StandardGeneticAlgorithm(200, 100, 10, nnop[2]))

    for i, name in enumerate(oa_names):
        start = time.time()
        correct = 0
        incorrect = 0

        train(oa[i], networks[i], oa_names[i], instances, measure)
        end = time.time()
        training_time = end - start

        optimal_instance = oa[i].getOptimal()
        networks[i].setWeights(optimal_instance.getData())

        start = time.time()
        for instance in instances:
            networks[i].setInputValues(instance.getData())
            networks[i].run()

            actual = instance.getLabel().getContinuous()
            predicted = networks[i].getOutputValues().get(0)

            if abs(predicted - actual) < 0.5:
                correct += 1
            else:
                incorrect += 1

        end = time.time()
        testing_time = end - start

        results += "\nResults for %s: \nCorrectly classified %d instances." % (name, correct)
        results += "\nIncorrectly classified %d instances.\nPercent correctly classified: %0.03f%%" % (incorrect, float(correct)/(correct+incorrect)*100.0)
        results += "\nTraining time: %0.03f seconds" % (training_time,)
        results += "\nTesting time: %0.03f seconds\n" % (testing_time,)

    print results
Example #21
def run_ga(t, pop, mate, mutate):
    fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate), str(t + 1))
    ef = TravelingSalesmanRouteEvaluationFunction(points)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
    fit = FixedIterationTrainer(ga, 10)
    times = [0]
    for i in range(0, maxIters, 10):
        start = time.clock()
        fit.train()
        elapsed = time.clock() - start
        times.append(times[-1] + elapsed)
        fevals = ef.fevals
        score = ef.value(ga.getOptimal())
        ef.fevals -= 1
        st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
        # print st
        base.write_to_file(fname, st)
    return
Example #23
def main():
    """Run algorithms on the adult dataset."""
    train_instances = initialize_instances(trainX)
    test_instances = initialize_instances(testX)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(train_instances)

    networks = []  # BackPropagationNetwork
    nnop = []  # NeuralNetworkOptimizationProblem
    oa = []  # OptimizationAlgorithm
    oa_names = ["RHC", "SA", "GA", "BP"]
    print(sys.argv)
    if len(sys.argv) > 1:
        oa_names = [sys.argv[1]]
        set_num = sys.argv[2]
    # results = ""
    for name in oa_names:
        classification_network = factory.createClassificationNetwork(
            [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER], RELU())
        networks.append(classification_network)
        if name != "BP":
            nnop.append(
                NeuralNetworkOptimizationProblem(data_set,
                                                 classification_network,
                                                 measure))
        else:
            print("adding backprop")
            rule = RPROPUpdateRule()
            nnop.append(
                BatchBackPropagationTrainer(data_set, classification_network,
                                            measure, rule))

    if "RHC" in oa_names:
        rhc_index = oa_names.index("RHC")
        oa.append(RandomizedHillClimbing(nnop[rhc_index]))
    if "SA" in oa_names:
        sa_index = oa_names.index("SA")
        oa.append(SimulatedAnnealing(1E11, .95, nnop[sa_index]))
    if "GA" in oa_names:
        ga_index = oa_names.index("GA")
        oa.append(StandardGeneticAlgorithm(100, 50, 10, nnop[ga_index]))
    if "BP" in oa_names:
        rule = RPROPUpdateRule()
        bp_index = oa_names.index("BP")
        oa.append(nnop[bp_index])

    for i, name in enumerate(oa_names):
        train(oa[i], networks[i], oa_names[i], train_instances, test_instances,
              measure)
Example #24
def PopulationRangeExperiment(name,  points, popRange, mateRange, mutRange, iterRange, mat):
    lastRow = -1
    for idx,i in enumerate(popRange):
        for jdx, j in enumerate(mateRange):
            print('.')
            for kdx, k in enumerate(mutRange):
                row = idx * len(mateRange) * len(mutRange) + jdx * len(mutRange) + kdx
                if row < lastRow:
                    print "ERROR in ROW CALC!"
                lastRow = row
                if j > i or k > i:
                    #print "skipping bad values for i,j,k "
                    continue
                ga = StandardGeneticAlgorithm(i, j, k, gap)
                helpers.IterRangeExperiment(name, ga, points, iterRange, mat, row)
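The row counter above linearizes the (pop, mate, mut) grid in row-major order. A quick standalone check of the same formula, with illustrative range sizes:

# illustrative check: 3 pop values, 2 mate values, 2 mut values
popRange, mateRange, mutRange = [50, 100, 150], [20, 40], [5, 10]
rows = []
for idx in range(len(popRange)):
    for jdx in range(len(mateRange)):
        for kdx in range(len(mutRange)):
            rows.append(idx * len(mateRange) * len(mutRange) + jdx * len(mutRange) + kdx)
print rows  # [0, 1, 2, ..., 11] -- strictly increasing, one row per combination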
Example #25
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances('./clean_data/adult_train.txt')
    testing_ints = initialize_instances('./clean_data/adult_test.txt')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    logunit = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "\nGA_tuning: %d , %d, %d\n" % (P, mate, mutate)
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER])
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure)
Example #26
    def run_experiment(self, train, test, validation):
        """Run experiment

        Args:
            train (list): List of training instances.
            test (list): List of test instances.
            validation (list): List of validation instances.

        """
        factory = BackPropagationNetworkFactory()  # instantiate main NN class
        params = [
            self.input_layer, self.hidden_layer_one, self.hidden_layer_two,
            self.output_layer
        ]
        self.network = factory.createClassificationNetwork(params)
        dataset = DataSet(train)  # setup training instances dataset
        nnop = NeuralNetworkOptimizationProblem(dataset, self.network,
                                                self.measure)
        oa = None

        # get output file name
        outpath = 'results/NN'
        filename = None

        # options for different optimization algorithms
        if self.oaName == 'BP':
            filename = '{}/results.csv'.format(self.oaName)
            rule = RPROPUpdateRule()
            oa = BatchBackPropagationTrainer(dataset, self.network,
                                             self.measure, rule)
        elif self.oaName == 'RHC':
            filename = '{}/results.csv'.format(self.oaName)
            oa = RandomizedHillClimbing(nnop)
        elif self.oaName == 'SA':
            filename = '{}/results_{}_{}.csv'.format(self.oaName, self.SA_T,
                                                     self.SA_C)
            oa = SimulatedAnnealing(self.SA_T, self.SA_C, nnop)
        elif self.oaName == 'GA':
            filename = '{}/results_{}_{}_{}.csv'.format(
                self.oaName, self.GA_P, self.GA_MA, self.GA_MU)
            oa = StandardGeneticAlgorithm(self.GA_P, self.GA_MA, self.GA_MU,
                                          nnop)

        # train network
        filepath = get_abspath(filename, outpath)
        self.train(oa, train, test, validation, filepath)
Example #27
def initialize_networks_and_optimization(networks, nnop, oa, instances,
                                         factory=BackPropagationNetworkFactory(),
                                         measure=SumOfSquaresError()):
    del networks[:]
    del nnop[:]
    del oa[:]

    data_set = DataSet(instances)
    for _ in OA_NAMES:
        classification_network = factory.createClassificationNetwork([
            INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER], LogisticSigmoid())
        networks.append(classification_network)
        nnop.append(NeuralNetworkOptimizationProblem(data_set, classification_network, measure))

    oa.append(RandomizedHillClimbing(nnop[0]))
    oa.append(SimulatedAnnealing(1E11, .95, nnop[1]))
    oa.append(StandardGeneticAlgorithm(200, 100, 10, nnop[2]))
Example #28
def main():
    """Run this experiment"""
    training_ints = initialize_instances(PATH + "X_train.csv")
    testing_ints = initialize_instances(PATH + "X_test.csv")
    validation_ints = initialize_instances(PATH + "y_train.csv")
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    logistic_sigmoid = LogisticSigmoid()
    data_set = DataSet(training_ints)
    data_set_size = data_set.size()
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], logistic_sigmoid)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(data_set_size, int(0.5 * data_set_size),
                                  int(0.1 * data_set_size), nnop)
    train(oa, classification_network, 'GA', training_ints, validation_ints,
          testing_ints, measure)
Example #29
def main():
    """Run algorithms on the abalone dataset."""
    train_instances = initialize_instances(TRAIN_FILE)
    test_instances = initialize_instances(TEST_FILE)

    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(train_instances)

    networks = []  # BackPropagationNetwork
    nnop = []  # NeuralNetworkOptimizationProblem
    oa = []  # OptimizationAlgorithm
    params = [(200, 100, 25),
              (200, 100, 50)]

    oa_names = [','.join(map(str, item)) for item in params]

    results = ""

    for name in oa_names:
        classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER_1, HIDDEN_LAYER_1, OUTPUT_LAYER])
        networks.append(classification_network)
        nnop.append(NeuralNetworkOptimizationProblem(data_set, classification_network, measure))

    for num, param in enumerate(params):
        oa.append(StandardGeneticAlgorithm(param[0], param[1], param[2], nnop[num]))

    result_file = open(WRITE_DIR, "w")

    for i, name in enumerate(oa_names):
        start = time.time()

        train(oa[i], networks[i], oa_names[i], train_instances, measure, result_file)
        end = time.time()
        training_time = end - start

        optimal_instance = oa[i].getOptimal()
        networks[i].setWeights(optimal_instance.getData())

        test(test_instances, networks[i], name, result_file)

        print "finished training, " + name

    print results
Example #30
def Genetic_algorithm(out_path, train_inst, test_inst, P, mate, mutate,
                      training_iterations):
    """Run this experiment"""
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(train_inst)
    # acti = LogisticSigmoid()
    acti = HyperbolicTangentSigmoid()
    rule = RPROPUpdateRule()

    oa_name = "GA_P{}_mate{}_mut{}".format(P, mate, mutate)
    with open(out_path.replace('GA_', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_tst',
                                             'acc_trg', 'acc_tst', 'elapsed'))

    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, OUTPUT_LAYER], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, train_inst, test_inst, measure,
          training_iterations, out_path.replace('GA_', oa_name))
Example #31
# GA
for t in range(numTrials):
    for pop, mate, mutate in product([100], [50, 30, 10], [50, 30, 10]):
        fname = outfile.format('GA{}_{}_{}'.format(pop, mate, mutate),
                               str(t + 1))
        with open(fname, 'w') as f:
            f.write('iterations,fitness,time,fevals\n')
        ef = FlipFlopEvaluationFunction()
        odd = DiscreteUniformDistribution(ranges)
        nf = DiscreteChangeOneNeighbor(ranges)
        mf = DiscreteChangeOneMutation(ranges)
        cf = SingleCrossOver()
        gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
        ga = StandardGeneticAlgorithm(pop, mate, mutate, gap)
        fit = FixedIterationTrainer(ga, 10)
        times = [0]
        for i in range(0, maxIters, 10):
            start = time.clock()
            fit.train()
            elapsed = time.clock() - start
            times.append(times[-1] + elapsed)
            fevals = ef.fevals
            score = ef.value(ga.getOptimal())
            ef.fevals -= 1
            st = '{},{},{},{}\n'.format(i, score, times[-1], fevals)
            print(st)
            with open(fname, 'a') as f:
                f.write(st)
Example #32
    i += 1
    max = ef.value(rhc0.getOptimal())
    print "rhc0,", i, ",", max
goal = max
pop0 = GenericProbabilisticOptimizationProblem(ef, odd, df)
mimic0 = MIMIC(200, 100, pop0)
i = 0
while i < timeout / 1000:
    mimic0.train()
    i += 1
    max = ef.value(mimic0.getOptimal())
    print "mimic0,", i, ",", max
if max > goal:
    goal = max
gap0 = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
ga0 = StandardGeneticAlgorithm(200, 100, 25, gap0)
i = 0
while i < timeout / 1000:
    ga0.train()
    i += 1
    max = ef.value(ga0.getOptimal())
    print "ga0,", i, ",", max
if max > goal:
    goal = max

# run RHC
rhc = RandomizedHillClimbing(hcp)
max = 0
i = 0
while max < goal and i < timeout:
    rhc.train()
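This fragment is truncated at both ends, but the pattern is clear from what survives: each baseline algorithm trains for a fixed time budget to establish a goal fitness, then RHC trains until it matches that goal or exhausts the budget. A sketch of the tail loop under that assumption (names taken from the fragment):

# assumed continuation: step RHC until it reaches the goal fitness or the budget runs out
while max < goal and i < timeout:
    rhc.train()
    i += 1
    max = ef.value(rhc.getOptimal())
print "rhc,", i, ",", max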
Example #33
def main():

    # The number of items
    NUM_ITEMS = 40
    # The number of copies each
    COPIES_EACH = 4
    # The maximum weight for a single element
    MAX_WEIGHT = 50
    # The maximum volume for a single element
    MAX_VOLUME = 50

    iterations = 20000
    gaIters = 1000
    mimicIters = 1000
    gaPop = 200
    gaMate = 150
    gaMutate = 25
    mimicSamples = 200
    mimicToKeep = 100
    saTemp = 100
    saCooling = .95
    alg = 'all'
    run = 0
    settings = []

    try:
        opts, args = getopt.getopt(sys.argv[1:], "ahrsgmn:N:c:w:v:i:", ["gaIters=", "mimicIters=","gaPop=", "gaMate=", "gaMutate=", "mimicSamples=", "mimicToKeep=", "saTemp=", "saCooling="])
    except getopt.GetoptError:
        print 'knapsack.py -i <iterations> -n <NUM_ITEMS> -c <COPIES_EACH> -w <MAX_WEIGHT> -v <MAX_VOLUME>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'knapsack.py -i <iterations> -n <NUM_ITEMS> -c <COPIES_EACH> -w <MAX_WEIGHT> -v <MAX_VOLUME>'
            sys.exit(1)
        elif opt == '-i':
            iterations = int(arg)
        elif opt == '-N':
            NUM_ITEMS = int(arg)
        elif opt == '-c':
            COPIES_EACH = int(arg)
        elif opt == '-w':
            MAX_WEIGHT = int(arg)
        elif opt == '-v':
            MAX_VOLUME = int(arg)
        elif opt == '-n':
            run = int(arg)
        elif opt == '-r':
            alg = 'RHC'
        elif opt == '-s':
            alg = 'SA'
        elif opt == '-g':
            alg = 'GA'
        elif opt == '-m':
            alg = 'MIMIC'
        elif opt == '-a':
            alg = 'all'
        elif opt == '--gaPop':
            gaPop = int(arg)
        elif opt == '--gaMate':
            gaMate = int(arg)
        elif opt == '--gaMutate':
            gaMutate = int(arg)
        elif opt == '--mimicSamples':
            mimicSamples = int(arg)
        elif opt == '--mimicToKeep':
            mimicToKeep = int(arg)
        elif opt == '--saTemp':
            saTemp = float(arg)
        elif opt == '--saCooling':
            saCooling = float(arg)
        elif opt == '--gaIters':
            gaIters = int(arg)
        elif opt == '--mimicIters':
            mimicIters = int(arg)
    vars = {
        'NUM_ITEMS': NUM_ITEMS,
        'COPIES_EACH': COPIES_EACH,
        'MAX_WEIGHT': MAX_WEIGHT,
        'MAX_VOLUME': MAX_VOLUME,
        'iterations': iterations,
        'gaIters': gaIters,
        'mimicIters': mimicIters,
        'gaPop': gaPop,
        'gaMate': gaMate,
        'gaMutate': gaMutate,
        'mimicSamples': mimicSamples,
        'mimicToKeep': mimicToKeep,
        'saTemp': saTemp,
        'saCooling': saCooling,
        'alg': alg,
        'run': run
    }

    settings = getSettings(alg, settings, vars)
    # Random number generator
    random = Random()

    # The volume of the knapsack
    KNAPSACK_VOLUME = MAX_VOLUME * NUM_ITEMS * COPIES_EACH * .4

    # create copies
    fill = [COPIES_EACH] * NUM_ITEMS
    copies = array('i', fill)

    # create weights and volumes
    fill = [0] * NUM_ITEMS
    weights = array('d', fill)
    volumes = array('d', fill)
    for i in range(0, NUM_ITEMS):
        weights[i] = random.nextDouble() * MAX_WEIGHT
        volumes[i] = random.nextDouble() * MAX_VOLUME


    # create range
    fill = [COPIES_EACH + 1] * NUM_ITEMS
    ranges = array('i', fill)

    ef = KnapsackEvaluationFunction(weights, volumes, KNAPSACK_VOLUME, copies)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = UniformCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    if alg == 'RHC' or alg == 'all':
        rhc = RandomizedHillClimbing(hcp)
        fit = FixedIterationTrainer(rhc, iterations)
        fit.train()
        print "RHC: " + str(ef.value(rhc.getOptimal()))
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(str(ef.value(rhc.getOptimal())))
        rows.append(row)
        output2('Knapsack', 'RHC', rows, settings)
        rows = []
        buildFooter("Knapsack", "RHC", rows, settings)
        outputFooter("Knapsack", "RHC", rows , settings)
    if alg == 'SA' or alg == 'all':
        sa = SimulatedAnnealing(saTemp, saCooling, hcp)
        fit = FixedIterationTrainer(sa, iterations)
        fit.train()
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(sa.getOptimal()))
        rows.append(row)
        print "SA: " + str(ef.value(sa.getOptimal()))
        output2('Knapsack', 'SA', rows, settings)
        rows = []
        buildFooter("Knapsack", "SA", rows, settings)
        outputFooter("Knapsack", "SA", rows, settings)
    if alg == 'GA' or alg == 'all':
        ga = StandardGeneticAlgorithm(gaPop, gaMate, gaMutate, gap)
        fit = FixedIterationTrainer(ga, gaIters)
        fit.train()
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(ga.getOptimal()))
        rows.append(row)
        print "GA: " + str(ef.value(ga.getOptimal()))
        output2('Knapsack', 'GA', rows, settings)
        buildFooter("Knapsack", "GA", rows, settings)
        outputFooter("Knapsack", "GA", rows , settings)
    if alg == 'MIMIC' or alg == 'all':
        mimic = MIMIC(mimicSamples, mimicToKeep, pop)
        fit = FixedIterationTrainer(mimic, mimicIters)
        fit.train()
        print "MIMIC: " + str(ef.value(mimic.getOptimal()))
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(mimic.getOptimal()))
        rows.append(row)
        output2('Knapsack', 'MIMIC', rows, settings)
        rows = []
        buildFooter("Knapsack", "MIMIC", rows, settings)
        outputFooter("Knapsack", "MIMIC", rows , settings)
print "    RHC test confusion matrix:", confusionMatrix(network_rhc, test)

# learn weights with simulated annealing
network_sa = factory.createClassificationNetwork([inputLayer, hiddenLayer, outputLayer])
nnop_sa = NeuralNetworkOptimizationProblem(set, network_sa, measure)
sa = SimulatedAnnealing(1E11, 0.95, nnop_sa)
fit = FixedIterationTrainer(sa, it_sa)
fit.train()
op = sa.getOptimal()
network_sa.setWeights(op.getData())
print "\nSA training error:", errorRate(network_sa, train)
print "SA training confusion matrix:", confusionMatrix(network_sa, train)
print "    SA test error:", errorRate(network_sa, test)
print "    SA test confusion matrix:", confusionMatrix(network_sa, test)

exit()

# learn weights with genetic algorithms
network_ga = factory.createClassificationNetwork([inputLayer, hiddenLayer, outputLayer])
nnop_ga = NeuralNetworkOptimizationProblem(set, network_ga, measure)
ga = StandardGeneticAlgorithm(200, 100, 10, nnop_ga)
fit = FixedIterationTrainer(ga, it_ga)
fit.train()
op = ga.getOptimal()
network_ga.setWeights(op.getData())
print "\nGA training error:", errorRate(network_ga, train)
print "GA training confusion matrix:", confusionMatrix(network_ga, train)
print "    GA test error:", errorRate(network_ga, test)
print "    GA test confusion matrix:", confusionMatrix(network_ga, test)

Example #35
def main():
    N = 200
    tempDenom = 5
    T = N / tempDenom
    fill = [2] * N
    ranges = array('i', fill)
    iterations = 2000
    gaIters = 1000
    mimicIters = 1000
    gaPop = 200
    gaMate = 100
    gaMutate = 10
    mimicSamples = 200
    mimicToKeep = 20
    saTemp = 1E11
    saCooling = .95
    alg = 'all'
    run = 0
    settings = []

    try:
        opts, args = getopt.getopt(sys.argv[1:], "ahn:rsgN:m:t:i:", ["gaIters=", "mimicIters=", "gaPop=", "gaMate=", "gaMutate=", "mimicSamples=", "mimicToKeep=", "saTemp=", "saCooling="])
    except getopt.GetoptError:
        print 'knapsack.py -i <iterations> -n <NUM_ITEMS> -c <COPIES_EACH> -w <MAX_WEIGHT> -v <MAX_VOLUME>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'knapsack.py -i <iterations> -n <NUM_ITEMS> -c <COPIES_EACH> -w <MAX_WEIGHT> -v <MAX_VOLUME>'
            sys.exit(1)
        elif opt == '-i':
            iterations = int(arg)
        elif opt == '-N':
            N = int(arg)
        elif opt == '-t':
            T = float(arg)
        elif opt == '-d':
            tempDenom = int(arg)
        elif opt == '-r':
            alg = 'RHC'
        elif opt == '-a':
            alg = 'all'
        elif opt == '-s':
            alg = 'SA'
        elif opt == '-g':
            alg = 'GA'
        elif opt == '-m':
            alg = 'MIMIC'
        elif opt == '--gaPop':
            gaPop = int(arg)
        elif opt == '--gaMate':
            gaMate = int(arg)
        elif opt == '--gaMutate':
            gaMutate = int(arg)
        elif opt == '--mimicSamples':
            mimicSamples = int(arg)
        elif opt == '--mimicToKeep':
            mimicToKeep = int(arg)
        elif opt == '--saTemp':
            saTemp = float(arg)
        elif opt == '--saCooling':
            saCooling = float(arg)
        elif opt == '--gaIters':
            gaIters = int(arg)
        elif opt == '--mimicIters':
            mimicIters = int(arg)
        elif opt == '-n':
            run = int(arg)


    vars = {
        'N': N,
        'tempDenom': tempDenom,
        'T': T,
        'fill': fill,
        'ranges': ranges,
        'iterations': iterations,
        'gaIters': gaIters,
        'mimicIters': mimicIters,
        'gaPop': gaPop,
        'gaMate': gaMate,
        'gaMutate': gaMutate,
        'mimicSamples': mimicSamples,
        'mimicToKeep': mimicToKeep,
        'saTemp': saTemp,
        'saCooling': saCooling,
        'alg': alg,
        'run': run
    }

    settings = getSettings(alg, settings, vars)

    T = N / tempDenom
    fill = [2] * N
    ranges = array('i', fill)

    ef = FourPeaksEvaluationFunction(T)
    odd = DiscreteUniformDistribution(ranges)
    nf = DiscreteChangeOneNeighbor(ranges)
    mf = DiscreteChangeOneMutation(ranges)
    cf = SingleCrossOver()
    df = DiscreteDependencyTree(.1, ranges)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)

    if alg == 'RHC' or alg == 'all':
        rhc = RandomizedHillClimbing(hcp)
        fit = FixedIterationTrainer(rhc, iterations)
        fit.train()
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(rhc.getOptimal()))
        rows.append(row)
        print "RHC: " + str(ef.value(rhc.getOptimal()))
        output2('4Peaks', 'RHC', rows, settings)
        rows = []
        buildFooter("4Peaks", "RHC", rows, settings),
        outputFooter("4Peaks", "RHC", rows,   settings)

    if alg == 'SA' or alg == 'all':
        sa = SimulatedAnnealing(saTemp, saCooling, hcp)
        fit = FixedIterationTrainer(sa, iterations)
        fit.train()
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(sa.getOptimal()))
        rows.append(row)
        print "SA: " + str(ef.value(sa.getOptimal()))
        output2('4Peaks', 'SA', rows, settings)
        rows = []
        buildFooter("4Peaks", "SA", rows, settings)
        outputFooter("4Peaks", "SA", rows, settings)

    if alg == 'GA' or alg == 'all':
        ga = StandardGeneticAlgorithm(gaPop, gaMate, gaMutate, gap)
        fit = FixedIterationTrainer(ga, gaIters)
        fit.train()
        print "GA: " + str(ef.value(ga.getOptimal()))
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(ga.getOptimal()))
        rows.append(row)
        output2('4Peaks', 'GA', rows, settings)
        rows = []
        buildFooter("4Peaks", "GA", rows, settings)
        outputFooter("4Peaks", "GA", rows , settings)

    if alg == 'MIMIC' or alg == 'all':
        mimic = MIMIC(mimicSamples, mimicToKeep, pop)
        fit = FixedIterationTrainer(mimic, mimicIters)
        fit.train()
        print "MIMIC: " + str(ef.value(mimic.getOptimal()))
        rows = []
        row = []
        row.append("Evaluation Function Value")
        row.append(ef.value(mimic.getOptimal()))
        rows.append(row)
        output2('4Peaks', 'MIMIC', rows, settings)
        rows = []
        buildFooter("4Peaks", "GA", rows, settings)
        outputFooter("4Peaks", "MIMIC", rows, settings)
Example #36
for x in range(0, N):
    path.append(rhc.getOptimal().getDiscrete(x))
print path

sa = SimulatedAnnealing(1e12, 0.999, hcp)
fit = FixedIterationTrainer(sa, 200000)
fit.train()
print "SA Inverse of Distance: " + str(ef.value(sa.getOptimal()))
print "Route:"
path = []
for x in range(0, N):
    path.append(sa.getOptimal().getDiscrete(x))
print path


ga = StandardGeneticAlgorithm(2000, 1500, 250, gap)
fit = FixedIterationTrainer(ga, 1000)
fit.train()
print "GA Inverse of Distance: " + str(ef.value(ga.getOptimal()))
print "Route:"
path = []
for x in range(0, N):
    path.append(ga.getOptimal().getDiscrete(x))
print path


# for mimic we use a sort encoding
ef = TravelingSalesmanSortEvaluationFunction(points)
fill = [N] * N
ranges = array("i", fill)
odd = DiscreteUniformDistribution(ranges)
Example #37
def main():

    iterations = 200000
    alg = 'all'
    gaPop = 2000
    gaMate = 1500
    gaMutate = 250
    mimicSamples = 500
    mimicToKeep = 100
    saTemp = 1E12
    saCooling = .999
    gaIters = 1000
    mimicIters = 1000
    run = 0
    settings = []

    try:
        opts, args = getopt.getopt(sys.argv[1:], "ahrsgmn:i:", ["gaIters=", "mimicIters=", "gaPop=", "gaMate=", "gaMutate=", "mimicSamples=", "mimicToKeep=", "saTemp=", "saCooling="])
    except:
        print 'travelingsalesman.py -i <iterations>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'travelingsalesman.py -i <iterations>'
            sys.exit(1)
        elif opt == '-i':
            if int(arg) < 1:
                print 'Iterations must be greater than 0'
                sys.exit(2)
            iterations = int(arg)
        elif opt == '-a':
            alg = 'all'
        elif opt == '-r':
            alg = 'RHC'
        elif opt == '-s':
            alg = 'SA'
        elif opt == '-g':
            alg = 'GA'
        elif opt == '-m':
            alg = 'MIMIC'
        elif opt == '--gaPop':
            if int(arg) < 1:
                print 'Population must be greater than 0'
                sys.exit(2)
            gaPop = int(arg)
        elif opt == '--gaMate':
            if int(arg) < 1:
                print 'Mating must be greater than 0'
                sys.exit(2)
            gaMate = int(arg)
        elif opt == '--gaMutate':
            if int(arg) < 1:
                print 'Mutators must be greater than 0'
                sys.exit(2)
            gaMutate = int(arg)
        elif opt == '--mimicSamples':
            if int(arg) < 1:
                print 'MIMIC samples must be greater than 0'
                sys.exit(2)
            mimicSamples = int(arg)
        elif opt == '--mimicToKeep':
            if int(arg) < 1:
                print 'MIMIC to keep must be greater than 0'
                sys.exit(2)
            mimicToKeep = int(arg)
        elif opt == '--saTemp':
            saTemp = float(arg)
        elif opt == '--saCooling':
            saCooling = float(arg)
        elif opt == '-n':
            run = int(arg)
        elif opt == '--gaIters':
            if int(arg) < 1:
                print 'GA Iterations must be greater than 0'
                sys.exit(2)
            gaIters = int(arg)
        elif opt == '--mimicIters':
            if int(arg) < 1:
                print 'MIMIC Iterations must be greater than 0'
                sys.exit(2)
            mimicIters = int(arg)

    params = {
            'iterations' : iterations,
            'alg' : alg,
            'gaPop' : gaPop,
            'gaMate' : gaMate,
            'gaMutate' : gaMutate,
            'mimicSamples' : mimicSamples,
            'mimicToKeep' : mimicToKeep,
            'saTemp' : saTemp,
            'saCooling' : saCooling,
            'gaIters' : gaIters,
            'mimicIters' : mimicIters,
            'run' : run
            }

    settings = getSettings(alg, settings, params)
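    # sanity checks: the GA cannot mate or mutate more individuals than its
    # population, and MIMIC cannot keep more samples than it draws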
    if gaPop < gaMate or gaPop < gaMutate or gaMate < gaMutate:
        pebkac({gaPop: 'total population', gaMate: 'mating population', gaMutate: 'mutating population'}, alg, 'total population', settings)
    if mimicSamples < mimicToKeep:
        pebkac({mimicSamples: 'mimic samples', mimicToKeep: 'mimic to keep'}, alg, 'mimic samples', settings)
    prob = 'Traveling Salesman Problem'
    invDist = {}
    cities = CityList()
    N = len(cities)
    #random = Random()
    points = [[0 for x in xrange(2)] for x in xrange(N)]
    for i in range(0, len(points)):
        coords = cities.getCoords(i)
        points[i][0] = coords[0]
        points[i][1] = coords[1]
    ef = TravelingSalesmanRouteEvaluationFunction(points)
    odd = DiscretePermutationDistribution(N)
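    # route encoding: a state is a permutation of the N cities, explored via swap moves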
    nf = SwapNeighbor()
    mf = SwapMutation()
    cf = TravelingSalesmanCrossOver(ef)
    hcp = GenericHillClimbingProblem(ef, odd, nf)
    gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
    rows = []


    if alg == 'RHC' or alg == 'all':
        print '\n----------------------------------'
        print 'Using Random Hill Climbing'
        for label, setting in settings:
            print label + ":" + str(setting)
        rhc = RandomizedHillClimbing(hcp)
        fit = FixedIterationTrainer(rhc, iterations)
        fit.train()
        path = []
        for x in range(0,N):
            path.append(rhc.getOptimal().getDiscrete(x))
        output(prob, 'RHC', path, points, settings)
        rows = []
        row = []
        row.append("Inverse of Distance")
        row.append(ef.value(rhc.getOptimal()))
        rows.append(row)
        invDist['RHC'] = ef.value(rhc.getOptimal())
        buildFooter(prob, 'RHC', rows, settings)
        outputFooter(prob, 'RHC', rows, settings)


    if alg == 'SA' or alg == 'all':
        print '\n----------------------------------'
        print 'Using Simulated Annealing'
        for label, setting in settings:
            print label + ":" + str(setting)
        sa = SimulatedAnnealing(saTemp, saCooling, hcp)
        fit = FixedIterationTrainer(sa, iterations)
        fit.train()
        path = []
        for x in range(0,N):
            path.append(sa.getOptimal().getDiscrete(x))
        output(prob, 'SA', path, points, settings)
        rows = []
        row = []
        row.append("Inverse of Distance")
        row.append(ef.value(sa.getOptimal()))
        rows.append(row)
        invDist['SA'] = ef.value(sa.getOptimal())
        buildFooter(prob, 'SA', rows, settings)
        outputFooter(prob, 'SA', rows, settings)

    if alg == 'GA' or alg == 'all':
        print '\n----------------------------------'
        print 'Using Genetic Algorithm'
        for label, setting in settings:
            print label + ":" + str(setting)
        ga = StandardGeneticAlgorithm(gaPop, gaMate, gaMutate, gap)
        fit = FixedIterationTrainer(ga, gaIters)
        fit.train()
        path = []
        for x in range(0,N):
            path.append(ga.getOptimal().getDiscrete(x))
        output(prob, 'GA', path, points, settings)
        rows = []
        row = []
        row.append("Inverse of Distance")
        row.append(ef.value(ga.getOptimal()))
        rows.append(row)
        invDist['GA'] = ef.value(ga.getOptimal())
        buildFooter(prob, 'GA', rows, settings)
        outputFooter(prob, 'GA', rows, settings)

    if alg == 'MIMIC' or alg == 'all':
        print '\n----------------------------------'
        print 'Using MIMIC'
        for label, setting in settings:
            print label + ":" + str(setting)
        # for mimic we use a sort encoding
        ef = TravelingSalesmanSortEvaluationFunction(points)
        fill = [N] * N
        ranges = array('i', fill)
        odd = DiscreteUniformDistribution(ranges)
        df = DiscreteDependencyTree(.1, ranges)
        pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
        mimic = MIMIC(mimicSamples, mimicToKeep, pop)
        fit = FixedIterationTrainer(mimic, mimicIters)
        fit.train()
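        # MIMIC's optimum is a vector of sort keys; sorting the keys while
        # tracking their original indices recovers the tour as a city order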
        path = []
        optimal = mimic.getOptimal()
        fill = [0] * optimal.size()
        ddata = array('d', fill)
        for i in range(0,len(ddata)):
            ddata[i] = optimal.getContinuous(i)
        order = ABAGAILArrays.indices(optimal.size())
        ABAGAILArrays.quicksort(ddata, order)
        output(prob, 'MIMIC', order, points, settings)
        rows = []
        row = []
        row.append("Inverse of Distance")
        row.append(ef.value(mimic.getOptimal()))
        rows.append(row)
        invDist['MIMIC'] = ef.value(mimic.getOptimal())
        buildFooter(prob, 'MIMIC', rows, settings)
        outputFooter(prob, 'MIMIC', rows, settings)


    maxn = max(len(key) for key in invDist)
    maxd = max(len(str(invDist[key])) for key in invDist)
    width = max(maxn, len('Best Alg')) + 2
    print "Results"
    for result in invDist:
        print "%-*s %s %-*s" % (width, result, ':', maxd, invDist[result])
    if alg == 'all':
        print "%-*s %s %-*s" % (width, 'Best Alg', ':', maxd, max(invDist.iterkeys(), key=(lambda key: invDist[key])))
    print '----------------------------------'
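
Given the command-line options parsed above, a typical invocation (run with Jython so the ABAGAIL classes are on the classpath; the parameter values here are illustrative) might look like:

jython travelingsalesman.py -g --gaPop=2000 --gaMate=1500 --gaMutate=250 --gaIters=1000
jython travelingsalesman.py -m --mimicSamples=500 --mimicToKeep=100 --mimicIters=1000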
Example #38
ef = ContinuousPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
nf = DiscreteChangeOneNeighbor(ranges)
mf = DiscreteChangeOneMutation(ranges)
cf = SingleCrossOver()
df = DiscreteDependencyTree(.1, ranges)
hcp = GenericHillClimbingProblem(ef, odd, nf)
gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)
pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
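# RHC and SA search the hill-climbing problem, the GA the genetic algorithm
# problem, and MIMIC the probabilistic optimization problem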

rhc = RandomizedHillClimbing(hcp)
fit = FixedIterationTrainer(rhc, 200000)
fit.train()
print "RHC: " + str(ef.value(rhc.getOptimal()))

sa = SimulatedAnnealing(1E11, .95, hcp)
fit = FixedIterationTrainer(sa, 200000)
fit.train()
print "SA: " + str(ef.value(sa.getOptimal()))

ga = StandardGeneticAlgorithm(200, 100, 10, gap)
fit = FixedIterationTrainer(ga, 1000)
fit.train()
print "GA: " + str(ef.value(ga.getOptimal()))

mimic = MIMIC(200, 20, pop)
fit = FixedIterationTrainer(mimic, 1000)
fit.train()
print "MIMIC: " + str(ef.value(mimic.getOptimal()))

runs = 10
# N=200
T = N / 5
fill = [2] * N
ranges = array("i", fill)

ef = FourPeaksEvaluationFunction(T)
odd = DiscreteUniformDistribution(ranges)
mf = DiscreteChangeOneMutation(ranges)

# print "Ga settings:\npop:%d\ncrossovertype:%d\ncrossoverrate:%d\nmutationrate:%d\n\n" % (ga_pop,co_type,ga_keep,ga_mut_type)

gap = GenericGeneticAlgorithmProblem(ef, odd, mf, cf)

t0 = time.time()
calls = []
results = []
for _ in range(runs):
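    # ga_pop, ga_keep and ga_mut are assumed to be defined earlier in this example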
    # ga_pop = N*5
    ga = StandardGeneticAlgorithm(ga_pop, ga_keep, ga_mut, gap)
    fit = FixedIterationTrainer(ga, 1000)
    fit.train()
    results.append(ef.value(ga.getOptimal()))
    calls.append(ef.getTotalCalls())
    ef.clearCount()
print "GA, average results , " + str(sum(results) / float(runs))
print "GA, average feval calls , " + str(sum(calls) / float(runs))
t1 = time.time() - t0
print "GA, average time , " + str(t1 / float(runs))