Esempio n. 1
0
Datasets: Banknote Authentication | Pima Indians Diabetes Database            .
____________________________________________________________________________"""
np.random.seed(1)
"""_________________________________________________________________________"""

if __name__ == '__main__':
    # Ask the user which dataset to run the experiment on.
    ds_id = int(input('Enter 0 for Banknote and 1 for Diabetes: '))

    if ds_id == 0:
        # Banknote Authentication dataset.
        (features_train, labels_train, features_test, labels_test,
         yes_examples, no_examples) = pp.banknote('banknote.csv')
        h = 7
    else:
        # Pima Indians Diabetes dataset — uses a wider hidden layer.
        (features_train, labels_train, features_test, labels_test,
         yes_examples, no_examples) = pp.diabetes('diabetes.csv')
        h = 15

    # Optional interactive overrides for the hyper-parameters:
    # h = int(input('Enter the number of the hidden layer neurons: '))
    # epochs = int(input('Enter the number of SGD epochs: '))
    # learning_rate = int(input('Enter the SGD learning rate: '))

    # Topology: one input unit per feature, h + 1 hidden units, one output.
    topology = (features_train.shape[1], h + 1, 1)
    activations = (af.Sigmoid, af.Sigmoid)
    nn = ann.Network(topology, activations)
    nn.train(features_train, labels_train,
             loss=lf.CE, epochs=5, learning_rate=1e-2)

    # Evaluate on the held-out split and report the confusion matrix.
    status = confusion_matrix(yes_examples, no_examples, nn, features_test,
                              labels_test, ds_id)
Esempio n. 2
0
def main():
    """Train a 784-16-16-10 network on MNIST with mini-batch SGD.

    Reads the idx-formatted label/image files, trains for a fixed number
    of epochs, printing the average cost over 100 held-out examples before
    each epoch, then prints a few sample predictions.

    NOTE(review): the "test" buffers below are read from the *train* idx
    files, so the reported cost is measured on training data — confirm
    whether the t10k files were intended here.
    """
    # ---- training labels (idx1: 4-byte magic, 4-byte count, payload) ----
    raw_labels = read_file('NeuralNetwork/train-labels.idx1-ubyte')
    magic_number = int.from_bytes(raw_labels[0:4], 'big')
    number_of_items = int.from_bytes(raw_labels[4:8], 'big')
    train_label_data = raw_labels[8:]
    print('magic_number:', magic_number)
    print('number_of_items:', number_of_items)
    print('--------------')

    # ---- training images (idx3: magic, count, rows, cols, payload) ----
    raw_images = read_file('NeuralNetwork/train-images.idx3-ubyte')
    magic_number = int.from_bytes(raw_images[0:4], 'big')
    number_of_items = int.from_bytes(raw_images[4:8], 'big')
    number_of_rows = int.from_bytes(raw_images[8:12], 'big')
    number_of_columns = int.from_bytes(raw_images[12:16], 'big')
    train_activation_data = raw_images[16:]
    print('magic_number:', magic_number)
    print('number_of_items:', number_of_items)
    print('number_of_rows:', number_of_rows)
    print('number_of_columns:', number_of_columns)

    # "Test" data — currently the same train files, headers stripped.
    test_label_data = read_file('NeuralNetwork/train-labels.idx1-ubyte')[8:]
    test_activation_data = read_file('NeuralNetwork/train-images.idx3-ubyte')[16:]

    image_size = 784  # 28 x 28 pixels per flattened image

    # SGD hyper-parameters.
    lrate = 0.001
    batch_size = 32
    epochs = 30

    # Decode the raw byte payloads into the network's input/target format.
    train_activation_data = convert_activation_data(train_activation_data, image_size)
    train_label_data = convert_label_data(train_label_data)
    test_activation_data = convert_activation_data(test_activation_data, image_size)
    test_label_data = convert_label_data(test_label_data)

    network = ANN.Network([784, 16, 16, 10])

    for epoch in range(epochs):
        # Average cost over the first 100 "test" examples, measured
        # before this epoch's weight updates.
        cost = 0
        for sample_idx in range(100):
            cost += network.cost(test_activation_data[sample_idx],
                                 test_label_data[sample_idx])
        cost /= 100

        print('epoch:', epoch, 'cost:', cost,
              'lrate:', lrate, 'batch-size:', batch_size)

        # One pass over the training set in mini-batches: accumulate
        # gradients per example, then apply one update per batch.
        for batch_vector in create_mini_batch_vectors(number_of_items, batch_size):
            activations, labels = create_mini_batch(train_activation_data,
                                                    train_label_data,
                                                    batch_vector)
            for sample, target in zip(activations, labels):
                network.forward_pass(sample)
                network.backward_pass(target)
            network.update_weights(lrate, len(batch_vector))
            network.update_biases(lrate, len(batch_vector))

    # Show a few predictions next to their targets.
    for i in range(3):
        print(network.forward_pass(test_activation_data[i]))
        print(network.layers[0].update_activation(test_activation_data[i]))
        print(test_label_data[i])
        print(network.cost(test_activation_data[i], test_label_data[i]))
        print("----------------")
Esempio n. 3
0
from __future__ import division
import os, wave, struct
import lib
import numpy as np
import ANN

if __name__ == '__main__':

    index = 0
    chunk = 256
    n = ANN.Network(14, 30)
    mp = ANN.SelfOrganizingMap(n)

    try:
        files = os.listdir("../cis 830/wav")
        for filename in files:
            try:
                wav = wave.open("../cis 830/wav/" + filename)
                bin = wav.readframes(wav.getnframes())
                data = []
                for i in range(0, len(bin)):
                    if i % 2 != 0:
                        continue

                    data.append(struct.unpack("<h", bin[i:i + 2])[0])

                rate, chunk = (wav.getframerate(), 256)
                duration = (len(data) / rate) * 1000
                featureVector = []
                for i in range(0, int(duration / 10)):
                    frame = data[i * 10:(i * 10) + 30]