Example #1
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_trg.csv')
    testing_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_test.csv')
    validation_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_val.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    sig = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "GA_%s_%s_%s" % (P, mate, mutate)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('%s,%s,%s,%s,%s,%s,%s,%s\n' %
                ('iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg',
                 'acc_val', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3, OUTPUT_LAYER
    ], sig)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    for trial in xrange(TRIALS):
        oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
        train(oa, classification_network, oa_name, training_ints,
              validation_ints, testing_ints, measure)
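These snippets all lean on module-level constants and an initialize_instances CSV loader defined earlier in each script but not shown here. A minimal sketch, assuming ABAGAIL.jar is on the Jython classpath; the layer sizes, TRIALS, and the OUTFILE pattern are illustrative assumptions, not values from the source:

from shared import Instance

# Illustrative constants (assumptions; real values depend on the dataset)
INPUT_LAYER = 28                    # one input node per feature
HIDDEN_LAYER = HIDDEN_LAYER1 = 10   # some scripts use HIDDEN_LAYER1..3 instead
HIDDEN_LAYER2 = HIDDEN_LAYER3 = 10
OUTPUT_LAYER = 1                    # single sigmoid output for binary labels
TRIALS = 5
OUTFILE = 'NN_OUTPUT/XXX_LOG.csv'   # 'XXX' gets replaced by the algorithm name

def initialize_instances(infile):
    """Assumed loader: features in all but the last CSV column, 0/1 label last."""
    instances = []
    with open(infile, 'r') as f:
        for line in f:
            row = line.strip().split(',')
            instance = Instance([float(value) for value in row[:-1]])
            instance.setLabel(Instance(float(row[-1])))
            instances.append(instance)
    return instances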
Example #2
def main(P, mate, mutate, layers, training_iterations, test_data_file, train_data_file, validate_data_file):
    """Run this experiment"""
    training_ints = base.initialize_instances(train_data_file)
    testing_ints = base.initialize_instances(test_data_file)
    validation_ints = base.initialize_instances(validate_data_file)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    # relu = RELU()
    relu = LogisticSigmoid()
    # 50 and 0.000001 are the defaults from RPROPUpdateRule.java
    rule = RPROPUpdateRule(0.064, 50, 0.000001)
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    with open(OUTFILE.format(oa_name), 'w') as f:
        f.write('{},{},{},{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg',
                                                            'acc_val', 'acc_tst', 'f1_trg', 'f1_val', 'f1_tst',
                                                            'elapsed'))
    classification_network = factory.createClassificationNetwork(
        layers, relu)
    nnop = NeuralNetworkOptimizationProblem(
        data_set, classification_network, measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    base.train(oa, classification_network, oa_name, training_ints, validation_ints, testing_ints, measure,
               training_iterations, OUTFILE.format(oa_name))
    return
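Examples #2, #14, and #15 delegate to a base module that is not shown. Here is a minimal sketch of what base.train could look like, reconstructed from the call sites; the evaluate helper and the 8-column log format (no f1 columns) are assumptions, and BatchBackPropagationTrainer (Example #14) trains the network in place and has no getOptimal(), so the setWeights line applies only to the randomized-optimization algorithms:

import time
from shared import Instance

def evaluate(network, instances, measure):
    """Assumed helper: return (mean squared error, accuracy) over instances."""
    error, correct = 0.0, 0
    for instance in instances:
        network.setInputValues(instance.getData())
        network.run()
        actual = instance.getLabel().getContinuous()
        output_values = network.getOutputValues()
        if abs(actual - output_values.get(0)) < 0.5:
            correct += 1
        example = Instance(output_values, Instance(output_values.get(0)))
        error += measure.value(instance.getLabel(), example)
    return error / len(instances), float(correct) / len(instances)

def train(oa, network, oa_name, training_ints, validation_ints, testing_ints,
          measure, training_iterations, outfile):
    """Assumed trainer: one oa.train() step per iteration, one CSV row appended."""
    start = time.time()
    for iteration in xrange(training_iterations):
        oa.train()
        network.setWeights(oa.getOptimal().getData())  # randomized-opt case only
        mse_trg, acc_trg = evaluate(network, training_ints, measure)
        mse_val, acc_val = evaluate(network, validation_ints, measure)
        mse_tst, acc_tst = evaluate(network, testing_ints, measure)
        with open(outfile, 'a') as f:
            f.write('{},{},{},{},{},{},{},{}\n'.format(
                iteration, mse_trg, mse_val, mse_tst,
                acc_trg, acc_val, acc_tst, time.time() - start))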
Example #3
def main():
    training_ints = initialize_instances('data/bank_train.csv')
    testing_ints = initialize_instances('data/bank_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = LogisticSigmoid()
    rule = RPROPUpdateRule()
    ######################### back prop #####################

    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
    train(BatchBackPropagationTrainer(data_set, classification_network, measure, rule),
          classification_network,
          'Backprop',
          training_ints, testing_ints, measure,
          './ANN/BP/BACKPROP_LOG.csv',
          2000)

    ######################### simulated annealing #################

    for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
        for T in [1e8, 1e10, 1e12]:
            classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
            nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
            oFile = "./ANN/SA/%s_%s_LOG.csv" % (CE, T)
            train(SimulatedAnnealing(T, CE, nnop),
                  classification_network,
                  'simulated annealing',
                  training_ints, testing_ints, measure,
                  oFile,
                  2000)
    
    ######################### random hill climbing #################

    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    train(RandomizedHillClimbing(nnop),
          classification_network,
          'RHC',
          training_ints, testing_ints, measure,
          './ANN/RHC/RHC_LOG.csv',
          2000)

    ######################### genetic algorithm #################
    
    for P in [100]:
        for mate in [5, 15, 30]:
            for mutate in [5, 15, 30]:
                classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
                nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
                oFile = "./ANN/GA/%s_%s_%s_LOG.csv" % (P, mate, mutate)
                train(StandardGeneticAlgorithm(P, mate, mutate, nnop),
                      classification_network,
                      'GA',
                      training_ints, testing_ints, measure,
                      oFile,
                      2000)
Example #4
def main():
    """Run this experiment"""
    training_ints = initialize_instances('titanic_train.csv')
    testing_ints = initialize_instances('titanic_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_names = ["Backprop"]
    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
    train(BatchBackPropagationTrainer(data_set, classification_network, measure, rule),
          classification_network, 'Backprop', training_ints, testing_ints, measure)
Example #5
def main():
    """Run this experiment"""
    training_ints = initialize_instances('Cryo_train.csv')
    testing_ints = initialize_instances('Cryo_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = LogisticSigmoid()
    rule = RPROPUpdateRule()
    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    oa = RandomizedHillClimbing(nnop)
    train(oa, classification_network, 'RHC', training_ints, testing_ints, measure)
Example #6
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances('spambase_train.csv')
    testing_ints = initialize_instances('spambase_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "GA_{}_{}_{}".format(P,mate,mutate)
    with open(OUTFILE.replace('XXX',oa_name),'w') as f:
        f.write('{},{},{},{},{},{}\n'.format('iteration','MSE_trg','MSE_tst','acc_trg','acc_tst','elapsed'))
    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER],acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints, measure)
Example #7
def main(T, CE):
    """Run this experiment"""
    training_ints = initialize_instances('./../data/wine_train.csv')
    testing_ints = initialize_instances('./../data/wine_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "SA_{}_{}".format(T, CE)
    with open(OUTFILE, 'w') as f:
        f.write('{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_tst', 'acc_trg', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    oa = SimulatedAnnealing(T, CE, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints, measure, TRAINING_ITERATIONS, OUTFILE)
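Example #7's main(T, CE) expects its hyperparameters from a caller. A hypothetical driver, borrowing the temperature/cooling grid from Example #3 (the grid values are an assumption here):

if __name__ == '__main__':
    for T in [1e8, 1e10, 1e12]:
        for CE in [0.15, 0.35, 0.55, 0.75, 0.95]:
            main(T, CE)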
Example #8
def main(CE):
    """Run this experiment"""
    training_ints = initialize_instances('./clean_data/adult_train.txt')
    testing_ints = initialize_instances('./clean_data/adult_test.txt')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    logunit = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "\nSA_cooling: %0.02f\n" % (CE)
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER])
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = SimulatedAnnealing(1E10, CE, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure)
Example #9
def initialize_networks_and_optimization(networks, nnop, oa, instances,
                                         factory=BackPropagationNetworkFactory(),
                                         measure=SumOfSquaresError()):
    del networks[:]
    del nnop[:]
    del oa[:]

    data_set = DataSet(instances)
    for _ in OA_NAMES:
        classification_network = factory.createClassificationNetwork([
            INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER], LogisticSigmoid())
        networks.append(classification_network)
        nnop.append(NeuralNetworkOptimizationProblem(data_set, classification_network, measure))

    oa.append(RandomizedHillClimbing(nnop[0]))
    oa.append(SimulatedAnnealing(1E11, .95, nnop[1]))
    oa.append(StandardGeneticAlgorithm(200, 100, 10, nnop[2]))
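Hypothetical usage of Example #9's helper: the three lists are cleared and repopulated in place, one network/problem pair per entry in OA_NAMES (assumed here to be RHC, SA, and GA to match the three algorithms appended):

networks, nnop, oa = [], [], []
instances = initialize_instances('data/train.csv')  # path is an assumption
initialize_networks_and_optimization(networks, nnop, oa, instances)
for name, algorithm in zip(OA_NAMES, oa):
    print name, '->', algorithm  # Jython 2.x print statement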
Example #10
def main(CE):
    """Run this experiment"""
    training_ints = initialize_instances(PATH + "X_train.csv")
    testing_ints = initialize_instances(PATH + "X_test.csv")
    validation_ints = initialize_instances(PATH + "y_train.csv")
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    logistic_sigmoid = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "SA{}".format(CE)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_val', 'MSE_tst', 'acc_trg', 'acc_val', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], logistic_sigmoid)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network, measure)
    oa = SimulatedAnnealing(1E10, CE, nnop)
    train(oa, classification_network, oa_name, training_ints, validation_ints, testing_ints, measure)
Example #11
def main():
    """Run this experiment"""
    training_ints = initialize_instances('./clean_data/adult_train.txt')
    testing_ints = initialize_instances('./clean_data/adult_test.txt')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    logunit = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_names = ["RHC"]
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER])  #,logunit)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = RandomizedHillClimbing(nnop)
    train(oa, classification_network, 'RHC', training_ints, testing_ints,
          measure)
Example #12
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances('./clean_data/adult_train.txt')
    testing_ints = initialize_instances('./clean_data/adult_test.txt')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    logunit = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "\nGA_tuning: %d , %d, %d\n" % (P, mate, mutate)
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER])
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure)
Example #13
def main():
    """Run this experiment"""
    training_ints = initialize_instances(PATH + "X_train.csv")
    testing_ints = initialize_instances(PATH + "X_test.csv")
    validation_ints = initialize_instances(PATH + "y_train.csv")
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    logistic_sigmoid = LogisticSigmoid()
    data_set = DataSet(training_ints)
    data_set_size = data_set.size()
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], logistic_sigmoid)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
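    # GA sized from the data: population = N, mate half of N, mutate a tenth of N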
    oa = StandardGeneticAlgorithm(data_set_size, int(0.5 * data_set_size),
                                  int(0.1 * data_set_size), nnop)
    train(oa, classification_network, 'GA', training_ints, validation_ints,
          testing_ints, measure)
Example #14
def main(layers, training_iterations, test_data_file, train_data_file,
         validate_data_file):
    """Run this experiment"""
    training_ints = base.initialize_instances(train_data_file)
    testing_ints = base.initialize_instances(test_data_file)
    validation_ints = base.initialize_instances(validate_data_file)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    # relu = RELU()
    relu = LogisticSigmoid()
    # 50 and 0.000001 are the defaults from RPROPUpdateRule.java
    rule = RPROPUpdateRule(0.064, 50, 0.000001)
    oa_names = ["Backprop"]
    classification_network = factory.createClassificationNetwork(layers, relu)
    base.train(
        BatchBackPropagationTrainer(data_set, classification_network, measure,
                                    rule), classification_network, 'Backprop',
        training_ints, validation_ints, testing_ints, measure,
        training_iterations, OUTFILE.format('Backprop'))
    return
Example #15
def main(layers, training_iterations, test_data_file, train_data_file,
         validate_data_file):
    """Run this experiment"""
    training_ints = base.initialize_instances(train_data_file)
    testing_ints = base.initialize_instances(test_data_file)
    validation_ints = base.initialize_instances(validate_data_file)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    # relu = RELU()
    relu = LogisticSigmoid()
    # 50 and 0.000001 are the defaults from RPROPUpdateRule.java
    rule = RPROPUpdateRule(0.064, 50, 0.000001)
    oa_names = ["RHC"]
    classification_network = factory.createClassificationNetwork(layers, relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = RandomizedHillClimbing(nnop)
    base.train(oa, classification_network, 'RHC', training_ints,
               validation_ints, testing_ints, measure, training_iterations,
               OUTFILE.format('RHC'))
    return
Example #16
def main():
    """Run this experiment"""
    training_ints = initialize_instances(PATH + "X_train.csv")
    testing_ints = initialize_instances(PATH + "X_test.csv")
    validation_ints = initialize_instances(PATH + "y_train.csv")
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    logistic_sigmoid = LogisticSigmoid()
    data_set = DataSet(training_ints)
    data_set_size = data_set.size()
    print(data_set_size)
    print(type(data_set_size))
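    # MIMIC needs a distribution over candidate points: odd is the uniform
    # initial distribution, df the dependency-tree density estimator it refits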
    odd = DiscreteUniformDistribution([data_set_size])
    df = DiscreteDependencyTree(.1, [data_set_size])
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, HIDDEN_LAYER1, OUTPUT_LAYER], logistic_sigmoid)
    evaluation = NeuralNetworkEvaluationFunction(classification_network,
                                                 data_set, measure)
    pop = GenericProbabilisticOptimizationProblem(evaluation, odd, df)
    oa = MIMIC(data_set_size, int(0.1 * data_set_size), pop)
    train(oa, classification_network, 'MIMIC', training_ints, validation_ints,
          testing_ints, measure)
Example #17
def main():
    """Run this experiment"""
    training_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_trg.csv')
    testing_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_test.csv')
    validation_ints = initialize_instances(
        '/Users/Sean/School/GeorgiaTech/CS7641/Assignment2/s_val.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    sig = LogisticSigmoid()
    rule = RPROPUpdateRule()
    oa_names = ["RHC"]
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3, OUTPUT_LAYER
    ], sig)
    for trial in xrange(TRIALS):
        oa = RandomizedHillClimbing(
            NeuralNetworkOptimizationProblem(data_set, classification_network,
                                             measure))
        train(oa, classification_network, 'RHC', training_ints,
              validation_ints, testing_ints, measure)
Example #18
def main():
    optalgs = ['SA']

    OA = {
        'SA': SimulatedAnnealing
    }

    params = {
        'SA': [
            [1e2, 0.15], [1e2, 0.25], [1e2, 0.35], [1e2, 0.45], [1e2, 0.55],
            [1e2, 0.65], [1e2, 0.75], [1e2, 0.85], [1e2, 0.95]
        ]
    }

    identifier = {
        'SA': lambda p: str(p[1]).replace('.', '_')
    }

    iterations = [10, 50, 100, 200, 500, 1000, 2000, 3000, 4000, 5000]

    train_instances, test_instances = initialize_instances()

    data_set = DataSet(train_instances)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()

    for optalg in optalgs:
        for param in params[optalg]:
            output_filename = '%s-%s.csv' % (optalg, identifier[optalg](param))
            csv_file = open(output_filename, 'w')
            fields = ['num_iterations', 'train_accuracy', 'test_accuracy', 'train_time', 'test_time']
            writer = csv.DictWriter(csv_file, fieldnames=fields)
            writer.writeheader()

            for num_iterations in iterations:
                network = factory.createClassificationNetwork(
                    [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER], LogisticSigmoid())
                nnop = NeuralNetworkOptimizationProblem(data_set, network, measure)

                oa = OA[optalg](*(param + [nnop]))
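                # e.g. param = [1e2, 0.15] unpacks to SimulatedAnnealing(1e2, 0.15, nnop)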

                start = time.time()
                train(oa, network, optalg, train_instances, measure, num_iterations)
                end = time.time()
                train_time = end - start

                optimal_instance = oa.getOptimal()
                network.setWeights(optimal_instance.getData())
                train_accuracy = test(network, train_instances)

                start = time.time()
                test_accuracy = test(network, test_instances)
                end = time.time()
                test_time = end - start

                results = {
                    'num_iterations': num_iterations,
                    'train_accuracy': train_accuracy,
                    'test_accuracy': test_accuracy,
                    'train_time': train_time,
                    'test_time': test_time
                }

                print optalg, param, results
                writer.writerow(results)

            csv_file.close()
            print '------'

        print '***** ***** ***** ***** *****'
Example #19
def main():
    """Run algorithms on the abalone dataset."""
    instances = initialize_instances()
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(instances)

    networks = []  # BackPropagationNetwork
    nnop = []  # NeuralNetworkOptimizationProblem
    oa = []  # OptimizationAlgorithm
    # oa_names = ["RHC", "SA", "GA"]
    oa_names = ["SA"]
    results = ""

    for name in oa_names:
        classification_network = factory.createClassificationNetwork([INPUT_LAYER,
                                                                      HIDDEN_LAYER_1,
                                                                      HIDDEN_LAYER_2,
                                                                      OUTPUT_LAYER],
                                                                     LogisticSigmoid())
        networks.append(classification_network)
        nnop.append(NeuralNetworkOptimizationProblem(data_set, classification_network, measure))

    # oa.append(RandomizedHillClimbing(nnop[0]))
    oa.append(SimulatedAnnealing(1E11, .8, nnop[0]))
    # oa.append(StandardGeneticAlgorithm(300, 150, 15, nnop[2]))

    for i, name in enumerate(oa_names):
        start = time.time()
        correct = 0
        incorrect = 0

        err_hist = train(oa[i], networks[i], oa_names[i], instances, measure)
        end = time.time()
        training_time = end - start

        # output error history
        EH_FILE = name+'_3000_0.8.csv'
        with open(EH_FILE, 'w') as f:
            writer = csv.writer(f)
            writer.writerows(err_hist)

        optimal_instance = oa[i].getOptimal()
        networks[i].setWeights(optimal_instance.getData())

        start = time.time()
        for instance in instances:
            networks[i].setInputValues(instance.getData())
            networks[i].run()

            y_true = instance.getLabel().getContinuous()
            y_prob = networks[i].getOutputValues().get(0)

            if abs(y_true - y_prob) < 0.5:
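                # a sigmoid output on the correct side of 0.5 counts as a hit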
                correct += 1
            else:
                incorrect += 1

        end = time.time()
        testing_time = end - start

        results += "\nResults for %s: \nCorrectly classified %d instances." % (name, correct)
        results += "\nIncorrectly classified %d instances.\nPercent correctly classified: %0.03f%%" % (incorrect, float(correct)/(correct+incorrect)*100.0)
        results += "\nTraining time: %0.03f seconds" % (training_time,)
        results += "\nTesting time: %0.03f seconds\n" % (testing_time,)

    print results
Example #20
def main():
    optalgs = ['GA']  # must match the keys used in OA, params and identifier below

    OA = {
        'GA': StandardGeneticAlgorithm,
    }

    # params = {
    #     'GA': [
    #         [10, 5, 5], [20, 10, 10], [30, 15, 15], [40, 30, 20], [80, 50, 30],
    #         [150, 100, 30], [300, 120, 40], [500, 300, 50]
    #     ],
    # }

    # varying population
    params = {
        'GA': [

        ],
    }

    identifier = {
        'GA': lambda p: '_'.join([str(v) for v in p]),
    }

    iterations = [1000, 2000]

    train_instances, test_instances = initialize_instances()

    data_set = DataSet(train_instances)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()

    for optalg in optalgs:
        for param in params[optalg]:
            output_filename = '%s-%s.csv' % (optalg, identifier[optalg](param))
            csv_file = open(output_filename, 'w')
            fields = ['num_iterations', 'train_accuracy', 'test_accuracy', 'train_time', 'test_time']
            writer = csv.DictWriter(csv_file, fieldnames=fields)
            writer.writeheader()

            for num_iterations in iterations:
                network = factory.createClassificationNetwork(
                    [INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER], LogisticSigmoid())
                nnop = NeuralNetworkOptimizationProblem(data_set, network, measure)

                oa = OA[optalg](*(param + [nnop]))
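                # e.g. param = [300, 120, 40] unpacks to StandardGeneticAlgorithm(300, 120, 40, nnop)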

                start = time.time()
                train(oa, network, optalg, train_instances, measure, num_iterations)
                end = time.time()
                train_time = end - start

                optimal_instance = oa.getOptimal()
                network.setWeights(optimal_instance.getData())
                train_accuracy = test(network, train_instances)

                start = time.time()
                test_accuracy = test(network, test_instances)
                end = time.time()
                test_time = end - start

                results = {
                    'num_iterations': num_iterations,
                    'train_accuracy': train_accuracy,
                    'test_accuracy': test_accuracy,
                    'train_time': train_time,
                    'test_time': test_time
                }

                print optalg, param, results
                writer.writerow(results)

            csv_file.close()
            print '------'

        print '***** ***** ***** ***** *****'