Example #1
def test_autoencoder():
    shapes = [[8, 3, 8], [10, 5, 10], [30, 10, 30]]
    for nnshape in shapes:
        parD = nn.initialize(nnshape)
        alpha = 25
        x = np.identity(nnshape[0])
        y = x
        # iterations parameter must be high enough for bigger shapes
        parD = nn.trainNN(parD, alpha, x, y, iterations=5000)
        autoout = nn.testNN(x, parD, outreturn=True)
        assert np.array_equal(np.around(autoout), x)
Example #2
def test(testFile, modelFile, classifier):
    if classifier == 'nearest':
        print("Testing k-Nearest Neighbour Classifier")
        correctAngle = getCorrectValues(testFile)  # returns a dictionary of test image_names -> correct_angles
        import nearest as near
        K = 25
        predictedAngle = near.testNearest(testFile, modelFile, K, correctAngle)
        print("Accuracy for k-Nearest Neighbour is: " +
              str(near.getAccuracy(predictedAngle, correctAngle)) + "%")
    elif classifier == 'adaboost':
        testAdaBoost(testFile)
    elif classifier == 'nnet':
        print("Testing Neural Network Classifier")
        import neuralnet as nn
        model = getModel(modelFile)
        nn.testNN(model, testFile)
    elif classifier == 'best':
        import best as b
        model = getModel(modelFile)
        b.testBest(model, testFile)
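
A brief, hypothetical invocation of the dispatcher above (the file names below are placeholders, not paths from the original project):

test("test-data.txt", "nearest_model.txt", "nearest")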
Example #3
def testBest(model, testFile):
    print("Testing Best Classifier")
    import neuralnet as nn
    nn.testNN(model, testFile)
import neuralnet as nn
import numpy as np
# needed by the later parts of this listing: tqdm for the progress bar,
# seqio for reading and one-hot encoding the Rap1 test sequences
from tqdm import tqdm
import seqio

shapes = [[8, 3, 8], [10, 5, 10], [30, 10, 30], [8, 40, 8]]
shape = -1
for nnshape in shapes:
    shape += 1
    datacollect = list()
    for alpha in [1] + list(range(5, 101, 5)):
        for iterations in range(100, 5001, 100):
            parD = nn.initialize(nnshape)
            x = np.identity(nnshape[0])
            y = x
            # iterations parameter must be high enough for bigger shapes
            parD = nn.trainNN(parD, alpha, x, y, iterations=iterations)
            autoout = nn.testNN(x, parD, outreturn=True)
            avgerr = np.average(abs(autoout - x))
            datacollect.append([shape, alpha, iterations, avgerr])
    np.save("%d_data" % shape, np.array(datacollect))
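
    # Added sketch, not part of the original example: a quick look at the sweep just
    # saved for this shape, picking the cheapest run whose average reconstruction
    # error is small (the 0.1 threshold is chosen only for illustration).
    sweep = np.array(datacollect)
    converged = sweep[sweep[:, 3] < 0.1]
    if converged.size:
        best = converged[np.argmin(converged[:, 2])]
        print("shape %d: alpha=%g reaches avg error %.3f after %d iterations"
              % (shape, best[1], best[3], int(best[2])))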

    # inputarray/outputarray, intrain/outtrain and the train/test index split used
    # below come from earlier code that is not part of this excerpt
    intest = inputarray[test]
    outtest = outputarray[test]

    parD = nn.initialize(nnshape)

    batches = list(range(10, len(outtrain), 10))  # train in batches of 10
    for iterations in tqdm(range(3000)):
        for index, batch in enumerate(batches):
            if index == 0:
                batch_intrain = intrain[:batch, :]
                batch_outtrain = np.matrix(outtrain[:batch]).T
            elif batch == batches[-1]:
                # final batch: take all remaining rows, including any leftover
                # beyond the last multiple of 10
                batch_intrain = intrain[batches[index - 1]:, :]
                batch_outtrain = np.matrix(outtrain[batches[index - 1]:]).T
            else:
                batch_intrain = intrain[batches[index - 1]:batches[index], :]
                batch_outtrain = np.matrix(
                    outtrain[batches[index - 1]:batches[index]]).T

            # one forward/backward training pass over the current batch
            parD = nn.forback(parD, alpha, batch_intrain, batch_outtrain, nnshape)

    AUC = nn.testNN(intest, parD, AUC=True, y=outtest)
    count += 1  # fold counter, initialised in code not shown here

allseqs = seqio.getseqs("rap1-lieb-test.txt")[0]
binarr = seqio.onehot(allseqs)

with open("predictions.txt", "w") as f:
    for i, seq in enumerate(allseqs):
        output = float(nn.testNN(binarr[i], parD, outreturn=True))
        f.write(seq + "\t" + "{:5.4f}".format(output) + "\n")