Example #1
def importCatDogANN(fileName=root.path() + "/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()
    params = np.load(root.path() + '/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
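A minimal usage sketch for the loader above (note that the fileName argument is never used: the weights always come from the hard-coded cat_dog_params.txt.npy path, pushed into the network through PyBrain's non-public _setParameters method). The random vector stands in for a preprocessed image flattened to 7500 grayscale values, and the [1, 0] = cat, [0, 1] = dog convention is taken from the get_cat_dog_testset example further down:

import numpy as np

net = importCatDogANN()
sample = np.random.rand(7500)        # stand-in for a flattened, preprocessed image
out = net.activate(sample)           # 2-dimensional output, one score per class
print("cat" if out[0] > out[1] else "dog")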
Example #2
def importCatDogANN(fileName=root.path()+"/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()
    params = np.load(root.path()+'/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
Example #3
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
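The loop above keeps training until the global error drops below 0.01 and simply restarts from a fresh network if that has not happened within 20 epochs. A hedged sketch of querying the result, using the class convention from generateTrainingData further down (small first pair and large second pair maps to (-1, 1), the reverse to (1, -1)):

net = trainedANN()
print(net.activate((10, 20, 300, 400)))    # expected to lean towards (-1, 1)
print(net.activate((300, 400, 10, 20)))    # expected to lean towards (1, -1)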
Example #4
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
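Unlike the feed-forward version, a RecurrentNetwork keeps internal state between activate() calls, so it is usually reset before feeding an unrelated sample; NMConnection appears to be a project-specific connection type rather than part of stock PyBrain. A short sketch, assuming trainedRNN() converges:

rnn = trainedRNN()
for sample in ((10, 20, 300, 400), (300, 400, 10, 20)):
    rnn.reset()                            # clear the recurrent state
    print(rnn.activate(sample))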
Example #5
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
Example #6
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path()+"/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #7
def exportCatDogRFCNN(net, fileName=root.path()+"/res/cat_dog_fc_params"):
    # arr = net.params
    # np.save(fileName, arr)
    # fileObject = open(fileName+'.pickle', 'w')
    # pickle.dump(net, fileObject)
    # fileObject.close()
    NetworkWriter.writeToFile(net, fileName+'.xml')
Example #8
def exportCatDogRNN(net, fileName=root.path() + "/res/cat_dog_nm_params"):
    # arr = net.params
    # np.save(fileName, arr)
    # fileObject = open(fileName+'.pickle', 'w')
    # pickle.dump(net, fileObject)
    # fileObject.close()
    NetworkWriter.writeToFile(net, fileName + '.xml')
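Both export helpers serialize the whole network to XML with NetworkWriter; the matching reader is NetworkReader, used by the importCatDogRNN examples further down. A self-contained round trip with a throwaway network (the /tmp path and buildNetwork stand-in are only for illustration):

from pybrain.tools.shortcuts import buildNetwork
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.tools.customxml.networkreader import NetworkReader

net = buildNetwork(4, 6, 2)                            # small throwaway network
NetworkWriter.writeToFile(net, '/tmp/some_net.xml')    # hypothetical path
restored = NetworkReader.readFrom('/tmp/some_net.xml')
print(restored.activate((1, 2, 3, 4)))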
Example #9
def load_mnist():
    """
    Loads MNIST files into 3D numpy arrays

    Adapted from: http://abel.ee.ucla.edu/cvxopt/_downloads/mnist.py
    """
    path = str(root.path()) + '/res/mnist.pkl.gz'
    f = gzip.open(path, 'rb')
    train_set, valid_set, test_set = cPickle.load(f)
    f.close()
    return train_set, valid_set, test_set
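The standard mnist.pkl.gz archive stores each split as an (images, labels) pair, with every image flattened to 784 values; assuming that layout, a quick peek at the shapes looks like this:

train_set, valid_set, test_set = load_mnist()
train_images, train_labels = train_set
print(train_images.shape)    # e.g. (50000, 784)
print(train_labels.shape)    # e.g. (50000,)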
Example #10
def create1OrderDataSet():
    lab_images = get_train_set(instance=False, number_of_instances=10)
    ds = SupervisedDataSet(100, 1)
    for i in range(len(lab_images)):
        data = np.zeros((100))
        for j in range(100):
            data[j] = lab_images[i][0][j]
        ds.addSample(data, lab_images[i][1])
        print "creating dataset, iteration:",i,"of",len(lab_images)
    ds.saveToFile(root.path() + '/res/dataset1')
    return ds
Example #11
def create1OrderDataSet():
    lab_images = get_train_set(instance=False, number_of_instances=10)
    ds = SupervisedDataSet(100, 1)
    for i in range(len(lab_images)):
        data = np.zeros((100))
        for j in range(100):
            data[j] = lab_images[i][0][j]
        ds.addSample(data, lab_images[i][1])
        print "creating dataset, iteration:", i, "of", len(lab_images)
    ds.saveToFile(root.path() + '/res/dataset1')
    return ds
Example #12
def load_mnist():
    """
    Loads MNIST files into 3D numpy arrays

    Adapted from: http://abel.ee.ucla.edu/cvxopt/_downloads/mnist.py
    """
    path = str(root.path()) + '/res/mnist.pkl.gz'
    f = gzip.open(path, 'rb')
    train_set, valid_set, test_set = cPickle.load(f)
    f.close()
    return train_set, valid_set, test_set
Example #13
def create2OrderDataSet():
    lab_images = get_train_set(instance=True)
    ds = SupervisedDataSet(5150, 1)
    for i in range(len(lab_images)):
        data = np.zeros((5150))
        for j in range(100):
            data[j] = lab_images[i][0][j]
        count = 100
        for x1 in range(100):
            for x2 in range(x1, 100):
                # print count
                data[count] = lab_images[i][0][x1]*lab_images[i][0][x2]
                count += 1
        ds.addSample(data, lab_images[i][1])
        print "creating dataset, iteration:",i,"of",len(lab_images)
    ds.saveToFile(root.path() + '/res/dataset2')
    return ds
Example #14
def create2OrderDataSet():
    lab_images = get_train_set(instance=True)
    ds = SupervisedDataSet(5150, 1)
    for i in range(len(lab_images)):
        data = np.zeros((5150))
        for j in range(100):
            data[j] = lab_images[i][0][j]
        count = 100
        for x1 in range(100):
            for x2 in range(x1, 100):
                # print count
                data[count] = lab_images[i][0][x1] * lab_images[i][0][x2]
                count += 1
        ds.addSample(data, lab_images[i][1])
        print "creating dataset, iteration:", i, "of", len(lab_images)
    ds.saveToFile(root.path() + '/res/dataset2')
    return ds
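The 5150-dimensional input built above is the 100 raw values plus every second-order product with x2 >= x1 (squares included), of which there are 100 * 101 / 2 = 5050. A one-line sanity check of that width:

n = 100
print(n + n * (n + 1) // 2)    # 5150, matching the SupervisedDataSet above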
Example #15
def generateTrainingData(size=10000, saveAfter=False):
    """
    Creates a set of training data with 4-dimensional input and 2-dimensional output
    with `size` samples
    """
    np.random.seed()
    data = SupervisedDataSet(4, 2)
    for i in xrange(int(size / 2)):  # size/2 samples of the first class
        [a, b] = np.random.random_integers(1, 100, 2)
        [c, d] = np.random.random_integers(100, 500, 2)
        data.addSample((a, b, c, d), (-1, 1))

    for i in xrange(int(size / 2)):  # size/2 samples of the second class
        [a, b] = np.random.random_integers(100, 500, 2)
        [c, d] = np.random.random_integers(1, 100, 2)
        data.addSample((a, b, c, d), (1, -1))

    if saveAfter:
        data.saveToFile(root.path() + "/res/dataSet")
    return data
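A quick sketch of generating a small dataset and inspecting it; a PyBrain SupervisedDataSet exposes its collected samples through the 'input' and 'target' fields:

data = generateTrainingData(size=100)
print(len(data))             # number of samples
print(data['input'][0])      # first 4-dimensional input
print(data['target'][0])     # its target, (-1, 1) or (1, -1)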
Example #16
def generateTrainingData(size=10000, saveAfter=False):
    """
    Creates a set of training data with 4-dimensional input and 2-dimensional output
    with `size` samples
    """
    np.random.seed()
    data = SupervisedDataSet(4, 2)
    for i in xrange(int(size/2)):  # size/2 samples of the first class
        [a, b] = np.random.random_integers(1, 100, 2)
        [c, d] = np.random.random_integers(100, 500, 2)
        data.addSample((a, b, c, d), (-1, 1))

    for i in xrange(int(size/2)):  # size/2 samples of the second class
        [a, b] = np.random.random_integers(100, 500, 2)
        [c, d] = np.random.random_integers(1, 100, 2)
        data.addSample((a, b, c, d), (1, -1))

    if saveAfter:
        data.saveToFile(root.path()+"/res/dataSet")
    return data
Example #17
def get_cat_dog_testset():
    count = 0
    images = os.listdir(root.path() + '/res/cats_proc/')
    # every processed image is assumed to have the same shape as the first cat image
    shape = cv2.imread(root.path() + '/res/cats_proc/' + images[0], 0).shape
    ds = SupervisedDataSet(shape[0] * shape[1], 2)
    for image in os.listdir(root.path() + '/res/cats_proc/'):
        img = cv2.imread(root.path() + '/res/cats_proc/' + image, 0)  # grayscale
        inp = np.reshape(img, shape[0] * shape[1])
        target = [1, 0]  # cat
        ds.addSample(inp, target)
        count += 1
    for image in os.listdir(root.path() + '/res/dogs_proc/'):
        img = cv2.imread(root.path() + '/res/dogs_proc/' + image, 0)  # grayscale
        # NOTE: a non-zero dsize makes cv2.resize ignore fx and fy, so this call
        # keeps img.shape (read as width x height) instead of halving the image
        img = cv2.resize(img, img.shape, fx=0.5, fy=0.5)
        inp = np.reshape(img, shape[0] * shape[1])
        target = [0, 1]  # dog
        ds.addSample(inp, target)
        count += 1
    return ds
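Combining this dataset with the parameter loader from the first examples gives a rough accuracy check; a sketch assuming the [1, 0] = cat, [0, 1] = dog convention above and that the processed images really are 7500 pixels, matching importCatDogANN's input layer:

import numpy as np

net = importCatDogANN()
ds = get_cat_dog_testset()
hits = 0
for inp, target in zip(ds['input'], ds['target']):
    if np.argmax(net.activate(inp)) == np.argmax(target):
        hits += 1
print(float(hits) / len(ds))     # fraction of correctly classified images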
Example #18
def load3OrderDataSet():
    ds = SupervisedDataSet.loadFromFile(root.path() + '/res/dataset3')
    return ds
Example #19
def exportCatDogANN(net, fileName=root.path()+"/res/cat_dog_params"):
    arr = net.params
    np.save(fileName, arr)
Example #20
def importRFCNN(fileName=root.path()+"/res/recRFCNN"):
    fileObject = open(fileName, 'rb')  # binary mode for pickled data
    net = pickle.load(fileObject)
    fileObject.close()
    return net
Example #21
def exportRFCNN(net, fileName=root.path()+"/res/recRFCNN"):
    fileObject = open(fileName, 'wb')  # binary mode for pickled data
    pickle.dump(net, fileObject)
    fileObject.close()
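importRFCNN and exportRFCNN above simply pickle the whole network object (PyBrain networks are picklable). A minimal round trip, passing an explicit fileName so nothing under /res is touched (the /tmp path and buildNetwork network are only for illustration):

from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(4, 6, 2)                       # throwaway network
exportRFCNN(net, fileName='/tmp/recRFCNN')        # hypothetical path
restored = importRFCNN(fileName='/tmp/recRFCNN')
print(restored.activate((1, 2, 3, 4)))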
Example #22
def load3OrderDataSet():
    ds = SupervisedDataSet.loadFromFile(root.path() + '/res/dataset3')
    return ds
Example #23
def importCatDogRNN(fileName=root.path() + "/res/recCatDogANN"):
    n = NetworkReader.readFrom(root.path() + "/res/cat_dog_nm_params.xml")
    return n
Example #24
def importCatDogRNN(fileName=root.path()+"/res/recCatDogANN"):
    n = NetworkReader.readFrom(root.path()+"/res/cat_dog_nm_params.xml")
    return n
Example #25
def exportRFCNN(net, fileName=root.path() + "/res/recRFCNN"):
    fileObject = open(fileName, 'wb')  # binary mode for pickled data
    pickle.dump(net, fileObject)
    fileObject.close()
Example #26
def exportCatDogANN(net, fileName=root.path() + "/res/cat_dog_params"):
    arr = net.params
    np.save(fileName, arr)
Example #27
def importRFCNN(fileName=root.path() + "/res/recRFCNN"):
    fileObject = open(fileName, 'rb')  # binary mode for pickled data
    net = pickle.load(fileObject)
    fileObject.close()
    return net