def buildXor(self):
    self.params['dataset'] = 'XOR'
    d = ClassificationDataSet(2)
    d.addSample([0., 0.], [0.])
    d.addSample([0., 1.], [1.])
    d.addSample([1., 0.], [1.])
    d.addSample([1., 1.], [0.])
    d.setField('class', [[0.], [1.], [1.], [0.]])
    self.trn_data = d
    self.tst_data = d
    global trn_data
    trn_data = self.trn_data
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(2, name='in')
    hiddenLayer = TanhLayer(3, name='hidden0')
    outLayer = ThresholdLayer(1, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    nn.randomize()
    self.net_settings = str(nn.connections)
    self.nn = nn
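For reference, once buildXor() has set up the network, it can be sanity-checked by activating it on the four stored XOR patterns. A minimal sketch, assuming it runs as another method of the same class (checkXor is a hypothetical name); the outputs depend on the random weights and on how ThresholdLayer is defined in this project:

def checkXor(self):
    # hypothetical helper: feed each training pattern through the freshly built net
    for inp, target in self.trn_data:
        out = self.nn.activate(inp)
        print(inp, '->', out, 'expected', target)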
Example #3
def xorDataSet():
    d = ClassificationDataSet(2)
    d.addSample([0., 0.], [0.])
    d.addSample([0., 1.], [1.])
    d.addSample([1., 0.], [1.])
    d.addSample([1., 1.], [0.])
    d.setField('class', [[0.], [1.], [1.], [0.]])
    return d
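A minimal training sketch built on xorDataSet(), using PyBrain's standard buildNetwork shortcut and BackpropTrainer; the layer sizes and epoch count are illustrative, not taken from the original code:

from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.shortcuts import buildNetwork

ds = xorDataSet()
net = buildNetwork(2, 3, 1, bias=True)   # 2 inputs, 3 hidden units, 1 output
trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(1000)                # epoch count chosen arbitrarily
print(net.activate([1., 0.]))            # should move toward 1.0 as training succeeds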
Example #5
import numpy as np
from pybrain.datasets import ClassificationDataSet
from pybrain.structure import SigmoidLayer, SoftmaxLayer
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.shortcuts import buildNetwork

def neutral_net(train_data, test_data, n_est, maxd):
    # Note: `pr` (used below for print_results) is a project-local helper module.
    #ds = SupervisedDataSet(len(train_data[0,:])-1,1)
    ds = ClassificationDataSet(len(train_data[0,:])-1,1,nb_classes=2,class_labels=['Lived','Died'])
    X=[];y=[]; X1=[]; y1=[]
    for row in range(0,len(train_data[:,0])):
        X.append(train_data[row,1:].astype(int))
        y.append([train_data[row,0].astype(int)])
        #ds.addSample(train_data[row,1:].astype(int),train_data[row,0].astype(int))
    #for row in range(0,len(test_data[:,0])):
    #    X.append(test_data[row,1:].astype(int))
    #    y.append([test_data[row,0].astype(int)])

    X=np.array(X); y=np.array(y)
    ds.setField('input',X)
    ds.setField('target',y)

    ds._convertToOneOfMany(bounds=[0,1])  # only for classification

    #net = buildNetwork(len(train_data[0,:])-1,100, 1)
    read = False
    if read:
        #net = NetworkReader.readFrom('10_200.xml')  # hiddenclass=SigmoidLayer
        pass
    else:
        net = buildNetwork(ds.indim, maxd, ds.outdim, bias=True, hiddenclass=SigmoidLayer, outclass=SoftmaxLayer)
        trainer = BackpropTrainer(net,dataset=ds,verbose=False,learningrate=0.01,momentum=0.1,weightdecay=0.01)
        trainer.trainUntilConvergence(maxEpochs=n_est,continueEpochs=10,validationProportion=0.3)
        #NetworkWriter.writeToFile(net, '10_200.xml')

    tot = 0.
    for a,b in zip(X,y):
        val = net.activate(a)
        tot+=int((val[0] > val[1] and b==0) or (val[0]<val[1] and b==1))
        '''num = int((net.activate(a)<0.5 and b<0.5) or (net.activate(a)>0.5 and b>0.5))
        tot+=num'''
        
    for row in range(0,len(test_data[:,0])):
        X1.append(test_data[row,1:].astype(int))
        y1.append([test_data[row,0].astype(int)])
    X1=np.array(X1); y1=np.array(y1)
    
    tot1 = 0.
    output = []
    for a,b in zip(X1,y1):
        val = net.activate(a)
        tot1+=int((val[0] > val[1] and b==0) or (val[0]<val[1] and b==1))
        output.append(int(val[0]<val[1]))
        '''num = int((net.activate(a)<0.5 and b<0.5) or (net.activate(a)>0.5 and b>0.5))
        tot1+=num
        output.append(int(net.activate(a)>0.5))'''

    pr.print_results(output)
    
        
    return [tot/len(y),tot1/len(y1)]
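Since _convertToOneOfMany expands the single 0/1 target into a two-unit one-hot target, the val[0]/val[1] comparisons above amount to an argmax over the softmax outputs. An equivalent sketch, assuming net, X1 and y1 as built inside the function:

preds = [int(np.argmax(net.activate(a))) for a in X1]   # winning output unit = predicted class
test_acc = sum(int(p == int(b[0])) for p, b in zip(preds, y1)) / float(len(y1))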
    def predict(self, x_test):
        DS = ClassificationDataSet(x_test.shape[1], nb_classes=self.__class_num)
        DS.setField('input', x_test)
        DS.setField('target', np.zeros((x_test.shape[0], 1)))
        DS._convertToOneOfMany()
        out = self.__pybrain_bpnn.activateOnDataset(DS)
        # This converts an activation vector to a class number;
        # kept commented out for future use.
        #out = out.argmax(axis=1)  # the highest output activation gives the class
        #if not self.__class_zero_indexing:  # indexing from 1 - add one to result
        #    out += 1
        return out
    def train(self, x, y, class_number=-1):
        self.__class_num = max(np.unique(y).size, class_number)
        if max(y) == self.__class_num:
            self.__class_zero_indexing = False
            y = np.array([i - 1 for i in y])

        DS = ClassificationDataSet(x.shape[1], nb_classes=self.__class_num)
        DS.setField('input', x)
        DS.setField('target', y.reshape(y.size, 1))
        DS._convertToOneOfMany()

        hidden_num = (DS.indim + DS.outdim) // 2  # floor division keeps the layer size an integer on Python 3

        self.__pybrain_bpnn = buildNetwork(DS.indim, hidden_num, DS.outdim, bias=True, hiddenclass=SigmoidLayer, outclass=SoftmaxLayer)

        trainer = BackpropTrainer(self.__pybrain_bpnn, dataset=DS, learningrate=0.07, lrdecay=1.0, momentum=0.6)

        trainer.trainUntilConvergence(DS, maxEpochs=30)
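A hedged usage sketch for the two methods above, assuming they belong to a small wrapper class; the name PyBrainClassifier is hypothetical and not taken from the original code:

import numpy as np

clf = PyBrainClassifier()                 # hypothetical name for the enclosing class
X = np.array([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])
y = np.array([0, 1, 1, 0])
clf.train(X, y)
activations = clf.predict(X)              # softmax activations, one row per sample
labels = activations.argmax(axis=1)       # class indices, as hinted in predict()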
Example #8
    def test_ann(self):
        from pybrain.datasets.classification import ClassificationDataSet
        # below line can be replaced with the algorithm of choice e.g.
        # from pybrain.optimization.hillclimber import HillClimber
        from pybrain.optimization.populationbased.ga import GA
        from pybrain.tools.shortcuts import buildNetwork

        # create a small two-class dataset
        d = ClassificationDataSet(2)
        d.addSample([181, 80], [1])
        d.addSample([177, 70], [1])
        d.addSample([160, 60], [0])
        d.addSample([154, 54], [0])
        d.setField('class', [[1.], [1.], [0.], [0.]])  # keep the class field consistent with the targets above

        nn = buildNetwork(2, 3, 1)
        # d.evaluateModuleMSE takes nn as its first and only argument
        ga = GA(d.evaluateModuleMSE, nn, minimize=True)
        for i in range(100):
            nn = ga.learn(0)[0]

        print(nn.activate([181, 80]))
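As the [0] indexing in the loop suggests, ga.learn() returns a (best evaluable, best evaluation) pair, so the current error can be tracked alongside the evolved network; a small hedged sketch:

best_net, best_mse = ga.learn(0)   # best network found so far and its MSE on d
print(best_mse)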
Example #10
    from pybrain.datasets.classification import ClassificationDataSet
    from pybrain.optimization.populationbased.ga import GA
    from pybrain.tools.shortcuts import buildNetwork

    # create XOR dataset
    d = ClassificationDataSet(2)
    d.addSample([0., 0.], [0.])
    d.addSample([0., 1.], [1.])
    d.addSample([1., 0.], [1.])
    d.addSample([1., 1.], [0.])
    d.setField('class', [ [0.],[1.],[1.],[0.]])

    nn = buildNetwork(2, 3, 1)
    ga = GA(d.evaluateModuleMSE, nn, minimize=True)
    for i in range(100):
        nn = ga.learn(0)[0]

    # test results after the above script
    In [68]: nn.activate([0,0])
    Out[68]: array([-0.07944574])

    In [69]: nn.activate([1,0])
    Out[69]: array([ 0.97635635])

    In [70]: nn.activate([0,1])
    Out[70]: array([ 1.0216745])

    In [71]: nn.activate([1,1])
    Out[71]: array([ 0.03604205])
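Since the GA only minimizes MSE, the activations above are continuous; a hedged follow-up that thresholds them at 0.5 to recover the binary XOR outputs (using the nn from the script above):

for pattern in [[0., 0.], [0., 1.], [1., 0.], [1., 1.]]:
    print(pattern, int(nn.activate(pattern)[0] > 0.5))   # gives 0, 1, 1, 0 for the run shown above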