Example #1 (MIMIC)
def main(sample, keep, m):
    """Run this experiment"""
    all_data = get_all_data()
    train_set, val_set = get_cv_set(all_data)
    train_data, train_label = process_data(train_set)
    val_data, val_label = process_data(val_set)
    training_ints = initialize_instances(train_data, train_label)
    testing_ints = initialize_instances(val_data, val_label)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    relu = RELU()
    #rule = RPROPUpdateRule()
    oa_name = "MIMIC{}".format(keep)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_tst',
                                             'acc_trg', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, OUTPUT_LAYER], relu)
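    # MIMIC operates on a discrete probabilistic optimization problem, so this
    # example builds one from a ContinuousPeaks evaluation function and a
    # dependency tree rather than from the neural-network weights.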
    ranges = array('i', [2] * 1)
    #ranges = array('i', train_label)
    ef = ContinuousPeaksEvaluationFunction(keep)
    odd = DiscreteUniformDistribution(ranges)
    df = DiscreteDependencyTree(m, ranges)
    pop = GenericProbabilisticOptimizationProblem(ef, odd, df)
    oa = MIMIC(sample, keep, pop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure)
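
All of these examples rely on helpers (get_all_data, get_cv_set, process_data, initialize_instances) and module-level constants (OUTFILE, INPUT_LAYER, OUTPUT_LAYER) defined elsewhere in the experiment script. As a reference point, the following is a minimal sketch of what initialize_instances typically looks like in ABAGAIL Jython experiments; the actual implementation is not shown in the source, so treat it as an assumption:

from shared import Instance

def initialize_instances(data, labels):
    """Hypothetical helper: wrap feature rows and labels in ABAGAIL Instances."""
    instances = []
    for row, label in zip(data, labels):
        instance = Instance([float(value) for value in row])
        instance.setLabel(Instance(float(label)))
        instances.append(instance)
    return instances
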
Example #2 (backpropagation)
def main():
    """Run this experiment"""
    all_data = get_all_data()
    train_set, val_set = get_cv_set(all_data)
    train_data, train_label = process_data(train_set)
    val_data, val_label = process_data(val_set)
    training_ints = initialize_instances(train_data, train_label)
    testing_ints = initialize_instances(val_data, val_label)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    relu = RELU()
    rule = RPROPUpdateRule()
    oa_names = ["Backprop"]
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, OUTPUT_LAYER], relu)
    train(
        BatchBackPropagationTrainer(data_set, classification_network, measure,
                                    rule), classification_network, 'Backprop',
        training_ints, testing_ints, measure)
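
Examples #1 through #4 hand the per-iteration loop to a shared train(oa, network, oa_name, training_ints, testing_ints, measure) helper that is not shown (Example #5 below uses its own variant). The CSV header written in Examples #1 and #4 suggests it logs training/validation MSE, accuracy, and elapsed time per iteration; the sketch below is one plausible implementation under that assumption, with a made-up TRAINING_ITERATIONS constant:

import time
from shared import Instance

TRAINING_ITERATIONS = 5000  # assumption: the real constant is defined elsewhere

def evaluate(network, instances, measure):
    """Hypothetical helper: mean error and accuracy over a set of Instances."""
    error, correct = 0.0, 0
    for instance in instances:
        network.setInputValues(instance.getData())
        network.run()
        output_values = network.getOutputValues()
        example = Instance(output_values, Instance(output_values.get(0)))
        error += measure.value(instance.getLabel(), example)
        if abs(output_values.get(0) - instance.getLabel().getContinuous()) < 0.5:
            correct += 1
    return error / len(instances), float(correct) / len(instances)

def train(oa, network, oa_name, training_ints, testing_ints, measure):
    """Hypothetical train loop: one optimizer step per iteration, logged to OUTFILE."""
    elapsed = 0.0
    for iteration in xrange(TRAINING_ITERATIONS):
        start = time.time()
        oa.train()  # one step of backprop or of the randomized optimizer
        elapsed += time.time() - start
        mse_trg, acc_trg = evaluate(network, training_ints, measure)
        mse_tst, acc_tst = evaluate(network, testing_ints, measure)
        with open(OUTFILE.replace('XXX', oa_name), 'a') as f:
            f.write('{},{},{},{},{},{}\n'.format(
                iteration, mse_trg, mse_tst, acc_trg, acc_tst, elapsed))
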
Example #3 (randomized hill climbing)
def main():
    """Run this experiment"""
    all_data = get_all_data()
    train_set, val_set = get_cv_set(all_data)
    train_data, train_label = process_data(train_set)
    val_data, val_label = process_data(val_set)
    training_ints = initialize_instances(train_data, train_label)
    testing_ints = initialize_instances(val_data, val_label)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    relu = RELU()
    #rule = RPROPUpdateRule()
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, OUTPUT_LAYER], relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = RandomizedHillClimbing(nnop)
    train(oa, classification_network, 'RHC', training_ints, testing_ints,
          measure)
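
These experiment functions are normally launched from an entry-point guard at the bottom of the Jython script. The guard below is an assumption for the RHC case; it is not shown in the source:

if __name__ == '__main__':
    main()
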
Example #4 (simulated annealing)
def main(CE):
    """Run this experiment"""
    all_data = get_all_data()
    train_set, val_set = get_cv_set(all_data)
    train_data, train_label = process_data(train_set)
    val_data, val_label = process_data(val_set)
    training_ints = initialize_instances(train_data, train_label)
    testing_ints = initialize_instances(val_data, val_label)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    relu = RELU()
    #rule = RPROPUpdateRule()
    oa_name = "SA{}".format(CE)
    with open(OUTFILE.replace('XXX', oa_name), 'w') as f:
        f.write('{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg', 'MSE_tst',
                                             'acc_trg', 'acc_tst', 'elapsed'))
    classification_network = factory.createClassificationNetwork(
        [INPUT_LAYER, OUTPUT_LAYER], relu)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = SimulatedAnnealing(1E10, CE, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure)
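
Example #4 parameterizes the simulated-annealing cooling exponent CE, so it is usually driven by a small sweep. The cooling values below are hypothetical; the source does not specify the grid:

if __name__ == '__main__':
    for ce in [0.15, 0.35, 0.55, 0.70, 0.95]:  # hypothetical cooling exponents
        main(ce)
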
Example #5 (RHC, SA, and GA compared on the abalone dataset)
def main():
    """Run algorithms on the abalone dataset."""
    all_data = get_all_data()
    train_set, val_set = get_cv_set(all_data)
    train_data, train_label = process_data(train_set)
    val_data, val_label = process_data(val_set)
    training_ints = initialize_instances(train_data, train_label)
    testing_ints = initialize_instances(val_data, val_label)
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)

    networks = []  # BackPropagationNetwork
    nnop = []  # NeuralNetworkOptimizationProblem
    oa = []  # OptimizationAlgorithm
    oa_names = ["RHC", "SA", "GA"]
    results = ""

    for name in oa_names:
        classification_network = factory.createClassificationNetwork([INPUT_LAYER, HIDDEN_LAYER, OUTPUT_LAYER])
        networks.append(classification_network)
        nnop.append(NeuralNetworkOptimizationProblem(data_set, classification_network, measure))

    oa.append(RandomizedHillClimbing(nnop[0]))
    oa.append(SimulatedAnnealing(1E11, .95, nnop[1]))
    oa.append(StandardGeneticAlgorithm(200, 100, 10, nnop[2]))

    for i, name in enumerate(oa_names):
        start = time.time()
        correct = 0
        incorrect = 0

        train(oa[i], networks[i], oa_names[i], training_ints, measure)
        end = time.time()
        training_time = end - start

        optimal_instance = oa[i].getOptimal()
        networks[i].setWeights(optimal_instance.getData())

        start = time.time()
        # Evaluation pass; note that it runs over the training instances, even
        # though the elapsed time is reported below as testing time.
        for instance in training_ints:
            networks[i].setInputValues(instance.getData())
            networks[i].run()

            actual = instance.getLabel().getContinuous()
            predicted = networks[i].getOutputValues().get(0)

            if abs(predicted - actual) < 0.5:
                correct += 1
            else:
                incorrect += 1

        end = time.time()
        testing_time = end - start

        results += "\nResults for %s: \nCorrectly classified %d instances." % (name, correct)
        results += "\nIncorrectly classified %d instances.\nPercent correctly classified: %0.03f%%" % (incorrect, float(correct)/(correct+incorrect)*100.0)
        results += "\nTraining time: %0.03f seconds" % (training_time,)
        results += "\nTesting time: %0.03f seconds\n" % (testing_time,)

    print(results)
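
The evaluation loop in Example #5 scores each network on the training instances only. If a held-out score is also wanted, the per-instance check can be factored into a small helper and run over testing_ints as well; this refactor is a sketch, not part of the original script:

def score(network, instances):
    """Hypothetical helper: fraction of instances predicted within 0.5 of the label."""
    correct = 0
    for instance in instances:
        network.setInputValues(instance.getData())
        network.run()
        actual = instance.getLabel().getContinuous()
        predicted = network.getOutputValues().get(0)
        if abs(predicted - actual) < 0.5:
            correct += 1
    return float(correct) / len(instances)

# e.g. score(networks[i], testing_ints) alongside the training-set loop above
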