Example 1
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
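A minimal usage sketch (my addition, not part of the example): the input layer is 4-wide and the output 2-wide, so the trained net maps a 4-vector to 2 scores. Helpers like draw_connections, exportANN and getDatasetFromFile come from the surrounding project.

net = trainedANN()
print net.activate([0.1, 0.2, 0.3, 0.4])  # hedged usage: a 2-element output array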
Example 2
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
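Because the network is recurrent, it keeps state across calls to activate(); a hedged usage sketch that clears that state before scoring a fresh sequence (the 4-dim inputs are hypothetical):

net = trainedRNN()
net.reset()  # clear the recurrent buffers before a new sequence
for frame in ([0, 1, 0, 1], [1, 0, 1, 0]):
    out = net.activate(frame)
print out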
Example 3
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print 'successfully converted to fast network'
    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
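To turn the two raw output scores into a cat/dog decision, an argmax over the activation is the usual step; a hedged sketch (the class-index order is an assumption about get_cat_dog_trainset()):

from numpy import argmax

n = trained_cat_dog_ANN()
scores = n.activate([0] * n.indim)  # n.indim matches the dataset's input dimension
print argmax(scores)  # predicted class index, 0 or 1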
Example 4
def main():
    a = 0
    for i in range(0, 100):
        inLayer = SigmoidLayer(2)
        hiddenLayer = SigmoidLayer(3)
        outLayer = SigmoidLayer(1)

        net = FeedForwardNetwork()
        net.addInputModule(inLayer)
        net.addModule(hiddenLayer)
        net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        net.addConnection(in_to_hidden)
        net.addConnection(hidden_to_out)

        net.sortModules()

        ds = SupervisedDataSet(2, 1)
        ds.addSample((1, 1), (0,))
        ds.addSample((1, 0), (1,))
        ds.addSample((0, 1), (1,))
        ds.addSample((0, 0), (0,))

        trainer = BackpropTrainer(net, ds)
        trainer.trainUntilConvergence()

        out = net.activate((1, 1))
        if out < 0.5:
            a = a + 1
    print(str(a) + "/100")
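trainUntilConvergence() holds out part of the dataset for validation, which is awkward with only four XOR samples; a hedged alternative is training for a fixed number of epochs (trainEpochs is standard BackpropTrainer API):

trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(1000)  # fixed-length training instead of the internal validation split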
Example 5
 def buildXor(self):
     self.params['dataset'] = 'XOR'
     d = ClassificationDataSet(2)
     d.addSample([0., 0.], [0.])
     d.addSample([0., 1.], [1.])
     d.addSample([1., 0.], [1.])
     d.addSample([1., 1.], [0.])
     d.setField('class', [[0.], [1.], [1.], [0.]])
     self.trn_data = d
     self.tst_data = d
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(2, name='in')
     hiddenLayer = TanhLayer(3, name='hidden0')
     outLayer = ThresholdLayer(1, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Example 6
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print globErr
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
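As in Example 2, the recurrent connection means the net carries state between activations; a hedged scoring sketch:

n = trained_cat_dog_RFCNN()
n.reset()  # drop state left over from training
print n.activate([0] * n.indim)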
Example 7
    def __init__(self, indim, outdim, hiddim=6):
        Module.__init__(self, indim, outdim)

        self._network = Network()
        self._in_layer = LinearLayer(indim + outdim)
        self._hid_layer = LSTMLayer(hiddim)
        self._out_layer = LinearLayer(outdim)
        self._bias = BiasUnit()

        self._network.addInputModule(self._in_layer)
        self._network.addModule(self._hid_layer)
        self._network.addModule(self._bias)
        self._network.addOutputModule(self._out_layer)

        self._hid_to_out_connection = FullConnection(self._hid_layer,
                                                     self._out_layer)
        self._in_to_hid_connection = FullConnection(self._in_layer,
                                                    self._hid_layer)
        self._network.addConnection(self._hid_to_out_connection)
        self._network.addConnection(self._in_to_hid_connection)
        self._network.addConnection(FullConnection(self._bias,
                                                   self._hid_layer))

        self._network.sortModules()

        self.offset = self._network.offset
        self.backprojectionFactor = 0.01
Example 8
def importCatDogANN(fileName=root.path() + "/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()
    params = np.load(root.path() + '/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
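The import above relies on parameters saved with NumPy; a matching exporter might look like this (exportCatDogANN is called in Example 3 but its body isn't shown, so this is a guess at its shape):

import numpy as np

def exportCatDogANN(n, fileName=root.path() + '/res/cat_dog_params.txt'):
    np.save(fileName, n.params)  # np.save appends '.npy', matching the np.load above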
Example 9
def trained3ONN():
    n = FeedForwardNetwork()

    inp = LinearLayer(176850, name='input')
    hid = LinearLayer(3, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid, inSliceTo=100, outSliceTo=1))
    n.addConnection(
        FullConnection(inp,
                       hid,
                       inSliceFrom=100,
                       inSliceTo=5150,
                       outSliceFrom=1,
                       outSliceTo=2))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=5150, outSliceFrom=2))
    n.addConnection(FullConnection(hid, out))

    n.sortModules()
    print "Network created"
    d = load3OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print "iteration #", count, " error = ", globErr
        if globErr < 0.01:
            break
        count = count + 1
        # if (count == 100):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example 10
def buildSimpleLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(100, name='i')
    h = LSTMLayer(10, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))
    N.sortModules()
    return N
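A short two-step pass (my sketch): the LSTM layer keeps its cell state between the steps, which is what the recurrent connection r1 provides across time.

N = buildSimpleLSTMNetwork()
for step in range(2):
    out = N.activate([0.5] * 100)  # 100 matches the 'i' layer's width
print out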
Example 11
 def _establishRecurrence(self):
     """ Adds a recurrent full connection from the output layer to the first
         hidden layer.
     """
     network = self.network
     outlayer = self.getOutputLayer()
     hid1layer = self.getFirstHiddenLayer()
     network.addRecurrentConnection(FullConnection(outlayer, hid1layer))
Example 12
 def buildIris(self):
     self.params['dataset'] = 'iris'
     self.trn_data, self.tst_data = pybrainData(0.5)
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(4, name='in')
     hiddenLayer = TanhLayer(6, name='hidden0')
     outLayer = ThresholdLayer(3, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Example 13
    def __init__(self, dims, **args):
        """ The one required argument specifies the sizes of each dimension (minimum 2) """
        assert len(dims) == 2
        SwipingNetwork.__init__(self, dims=dims, **args)
        if self.mariopos is None:
            self.mariopos = (dims[0] / 2, dims[1] / 2)

        pdims = product(dims)
        # the input is a 2D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.insize * pdims, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')

        # the output is a 2D-mesh (as a view on a flat sigmoid output layer)
        outmod = self.outcomponentclass(self.outputs * pdims, name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')

        if self.componentclass is MDLSTMLayer:
            c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
            hiddenmesh = ModuleMesh(c, (self.size, self.size, 4),
                                    'hidden',
                                    baserename=True)
        else:
            hiddenmesh = ModuleMesh.constructWithLayers(
                self.componentclass, self.hsize,
                tuple(list(dims) + [self.swipes]), 'hidden')

        self._buildSwipingStructure(inmesh, hiddenmesh, outmesh)

        o = LinearLayer(self.outputs)
        self.addConnection(IdentityConnection(outmesh[self.mariopos], o))
        self.outmodules = []
        self.addOutputModule(o)

        # add the identity connections for the states
        for m in self.modules:
            if isinstance(m, MDLSTMLayer):
                tmp = m.stateSlice()
                index = 0
                for c in list(self.connections[m]):
                    if isinstance(c.outmod, MDLSTMLayer):
                        self.addConnection(
                            IdentityConnection(
                                tmp,
                                c.outmod.stateSlice(),
                                outSliceFrom=self.hsize * (index),
                                outSliceTo=self.hsize * (index + 1)))
                        index += 1

        # special inputs
        self.addInputModule(LinearLayer(2, name='specialin'))
        self.addConnection(FullConnection(self['specialin'], o))

        self.sortModules()
Example 14
 def buildParity(self):
     self.params['dataset'] = 'parity'
     self.trn_data = ParityDataSet(nsamples=75)
     self.trn_data.setField('class', self.trn_data['target'])
     self.tst_data = ParityDataSet(nsamples=75)
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(4, name='in')
     hiddenLayer = TanhLayer(6, name='hidden0')
     outLayer = ThresholdLayer(1, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Example 15
    def __init__(self, outdim, hiddim=15):
        """ Create an EvolinoNetwork with for sequences of dimension outdim and
        hiddim dimension of the RNN Layer."""
        indim = 0
        Module.__init__(self, indim, outdim)

        self._network = RecurrentNetwork()
        self._in_layer = LinearLayer(indim + outdim)
        self._hid_layer = LSTMLayer(hiddim)
        self._out_layer = LinearLayer(outdim)
        self._bias = BiasUnit()

        self._network.addInputModule(self._in_layer)
        self._network.addModule(self._hid_layer)
        self._network.addModule(self._bias)
        self._network.addOutputModule(self._out_layer)

        self._in_to_hid_connection = FullConnection(self._in_layer,
                                                    self._hid_layer)
        self._bias_to_hid_connection = FullConnection(self._bias,
                                                      self._hid_layer)
        self._hid_to_out_connection = FullConnection(self._hid_layer,
                                                     self._out_layer)
        self._network.addConnection(self._in_to_hid_connection)
        self._network.addConnection(self._bias_to_hid_connection)
        self._network.addConnection(self._hid_to_out_connection)

        self._recurrent_connection = FullConnection(self._hid_layer,
                                                    self._hid_layer)
        self._network.addRecurrentConnection(self._recurrent_connection)

        self._network.sortModules()
        self._network.reset()

        self.offset = self._network.offset
        self.backprojectionFactor = 0.01
Example 16
 def _backwardImplementation(self, outerr, inerr, inbuf):
     FullConnection._backwardImplementation(self, outerr, inerr, inbuf)
Example 17
 def _forwardImplementation(self, inbuf, outbuf):
     FullConnection._forwardImplementation(self, inbuf, outbuf)
Example 18
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.biasunit import BiasUnit
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.networks.feedforward import FeedForwardNetwork

rede = FeedForwardNetwork()
camadaEntrada = LinearLayer(2)
camadaOculta = SigmoidLayer(3)
camadaSaida = SigmoidLayer(1)
bias1 = BiasUnit()
bias2 = BiasUnit()

# declare the layers' roles: addModule alone leaves the net without declared
# input/output modules, so sortModules() cannot wire it correctly
rede.addInputModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addOutputModule(camadaSaida)
rede.addModule(bias1)
rede.addModule(bias2)

entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)
biasOculta = FullConnection(bias1, camadaOculta)
biasSaida = FullConnection(bias2, camadaSaida)

# the connections must also be registered with the network
rede.addConnection(entradaOculta)
rede.addConnection(ocultaSaida)
rede.addConnection(biasOculta)
rede.addConnection(biasSaida)

rede.sortModules()

print(rede)
print(entradaOculta.params)
print(ocultaSaida.params)
print(biasOculta.params)
print(biasSaida.params)
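A one-line sanity check (my addition): a forward pass through the freshly wired, untrained net.

print(rede.activate([1, 0]))  # one output value from the 2-3-1 topology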
Example 19
#ChampionDictionary holds all the riot static data about each champion. The Riot IDs are the keys of the dictionary
championdictionary = DatabaseActions.CreateChampionDictionary()

#Creates a Neural Network of Appropriate size
predictionNet = FeedForwardNetwork()

inLayer = LinearLayer(len(championdictionary))
hiddenLayer = SigmoidLayer(5)
outLayer = SigmoidLayer(1)

predictionNet.addInputModule(inLayer)
predictionNet.addModule(hiddenLayer)
predictionNet.addOutputModule(outLayer)
predictionNet.addModule(BiasUnit(name = 'bias'))

in_to_hidden = FullConnection(inLayer,hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer,outLayer)

predictionNet.addConnection(in_to_hidden)
predictionNet.addConnection(hidden_to_out)
predictionNet.addConnection(FullConnection(predictionNet['bias'],hiddenLayer))
predictionNet.addConnection(FullConnection(predictionNet['bias'],outLayer))
predictionNet.sortModules()


trainingSet = SupervisedDataSet(len(championdictionary),1)

#Takes each game and turns it into a vector. -1 is stored if the champion is on the opposing team, 1 if the champion is on the player's team
#and 0 if it wasn't played. The vector is then fed into the Neural Network's Training Set
print "Adding Games to NN"
for game in aramdata.readlines():
    # excerpt truncated in the source; the loop encodes each game as the
    # -1/0/1 champion vector described above and adds it to trainingSet
    pass
Example 20
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.tools.xml.networkwriter import NetworkWriter
from pybrain.datasets.classification import ClassificationDataSet

dim = 381
n = FeedForwardNetwork()
inLayer = LinearLayer(dim)
hiddenLayer = SigmoidLayer(100)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

in_to_hidden = FullConnection(inLayer,hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer,outLayer)

n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

n.sortModules()


print 'build set'

alldata = ClassificationDataSet(dim, 1, nb_classes=2)

(data,label,items) = BinReader.readData(ur'F:\AliRecommendHomeworkData\1212新版\train15_17.expand.samp.norm.bin') 
#(train,label,data) = BinReader.readData(r'C:\data\small\norm\train1217.bin')
for i in range(len(data)):
    # excerpt ends here in the source; presumably each vector/label pair is added:
    alldata.addSample(data[i], label[i])
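The NetworkWriter import at the top suggests the excerpt goes on to train and save the net; a hedged sketch of that missing tail (the output filename is hypothetical):

from pybrain.supervised.trainers.backprop import BackpropTrainer

trainer = BackpropTrainer(n, alldata)
trainer.train()  # one epoch; the real script likely loops
NetworkWriter.writeToFile(n, 'train15_17.net.xml')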
Example 21
ds = SupervisedDataSet(2, 1)  # assumed: the excerpt starts mid-file (mirrors Example 22)
ds.addSample((0, 0), (0, ))
ds.addSample((0, 1), (1, ))
ds.addSample((1, 0), (1, ))
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define net
net = RecurrentNetwork()
net.addInputModule(LinearLayer(2, name="il"))
net.addModule(SigmoidLayer(4, name="h1"))
net.addModule(SigmoidLayer(4, name="h2"))
net.addOutputModule(LinearLayer(1, name="ol"))
c1 = FullConnection(net["il"], net["h1"])
c2 = FullConnection(net["h1"], net["h2"])
c3 = FullConnection(net["h2"], net["ol"])
cr1 = FullConnection(net["h1"], net["h1"])
net.addConnection(c1)
net.addConnection(c2)
net.addConnection(c3)
net.addRecurrentConnection(cr1)
net.sortModules()

print(net)

trainer = BackpropTrainer(net, ds)

for i in range(20):
    for j in range(1000):
        trainer.train()  # loop body missing in the excerpt; one backprop epoch per iteration is an assumption
Example 22
ds = SupervisedDataSet(2, 1)

ds.addSample((0, 0), (0, ))
ds.addSample((0, 1), (1, ))
ds.addSample((1, 0), (1, ))
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

#wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()

print(net)
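The snippet stops after printing the topology; a hedged continuation that trains it on the XOR set built above (the BackpropTrainer import is my assumption about the surrounding file):

from pybrain.supervised.trainers import BackpropTrainer

trainer = BackpropTrainer(net, ds)
for epoch in range(1000):
    err = trainer.train()
print(err)
print(net.activate((1, 0)))  # should approach 1 after training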
Example 23
    def buildBMTrainer(self):
        x, y = self.readexcel()
        # generate `size` rows of simulated data:
        # self.writeexcel(size=100)
        # resx=contrib(x,0.9)
        # print '**********************'
        # print resx
        # x1=x[:,[3,4,5,6,7,8,9,10,11,0,1,2]]
        # resx1=contrib(x1)
        # print '**********************'
        # print resx1

        self.realy = y
        per = int(len(x))
        # Normalize the data (normalization is generally required with sigmoid activations)
        self.sx = MinMaxScaler()
        self.sy = MinMaxScaler()

        xTrain = x[:per]
        xTrain = self.sx.fit_transform(xTrain)
        yTrain = y[:per]
        yTrain = self.sy.fit_transform(yTrain)

        # Initialize the feedforward network
        self.__fnn = FeedForwardNetwork()

        # Build the input, hidden and output layers; 3-5 hidden layers are usually enough
        inLayer = LinearLayer(x.shape[1], 'inLayer')
        hiddenLayer0 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer0')
        hiddenLayer1 = TanhLayer(self.hiddendim, 'hiddenLayer1')
        hiddenLayer2 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer2')
        outLayer = LinearLayer(self.rescol, 'outLayer')

        # Add the input, hidden and output layers to the fnn
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer0)
        self.__fnn.addModule(hiddenLayer1)
        self.__fnn.addModule(hiddenLayer2)
        self.__fnn.addOutputModule(outLayer)

        # Fully connect consecutive layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer0)
        hidden_to_hidden0 = FullConnection(hiddenLayer0, hiddenLayer1)
        hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
        hidden_to_out = FullConnection(hiddenLayer2, outLayer)

        # Register the connections with the fnn
        self.__fnn.addConnection(in_to_hidden)
        self.__fnn.addConnection(hidden_to_hidden0)
        self.__fnn.addConnection(hidden_to_hidden1)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()
        # Initialize the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # Add the training data and labels to DS
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # Train with backprop until convergence; cap at 10000 epochs
        trainer = BMBackpropTrainer(self.__fnn,
                                    DS,
                                    learningrate=0.0001,
                                    verbose=self.verbose)
        if self.myalg:
            trainingErrors = trainer.bmtrain(maxEpochs=10000,
                                             verbose=True,
                                             continueEpochs=3000,
                                             totalError=0.0001)
        else:
            trainingErrors = trainer.trainUntilConvergence(
                maxEpochs=10000, continueEpochs=3000, validationProportion=0.1)
        # CV = CrossValidator(trainer, DS, n_folds=4, valfunc=ModuleValidator.MSE)
        # CV.validate()
        # CrossValidator
        # trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000,continueEpochs=5000, validationProportion=0.1)
        # self.finalError = trainingErrors[0][-2]
        # self.finalerror=trainingErrors[0][-2]
        # if (self.verbose):
        #     print 'final overall error:', self.finalError
        self.__sy = self.sy
        self.__sx = self.sx
        for i in range(len(xTrain)):
            a = self.sy.inverse_transform(
                self.__fnn.activate(xTrain[i]).reshape(-1, 1))
            self.restest.append(a[0][0])
Example 24
 def _backwardImplementation(self, outerr, inerr, inbuf):
     FullConnection._backwardImplementation(self, outerr, inerr, inbuf)
Example 25
 def _forwardImplementation(self, inbuf, outbuf):
     FullConnection._forwardImplementation(self, inbuf, outbuf)
Example 26
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer
from pybrain.datasets.supervised import SupervisedDataSet

network = FeedForwardNetwork()  # create network
inputLayer = SigmoidLayer(1)  # maybe LinearLayer ?
hiddenLayer = SigmoidLayer(4)
outputLayer = SigmoidLayer(1)  # maybe LinearLayer ?

network.addInputModule(inputLayer)
network.addModule(hiddenLayer)
network.addOutputModule(outputLayer)
# Connection
network.addConnection(FullConnection(inputLayer, hiddenLayer))
network.addConnection(FullConnection(hiddenLayer, outputLayer))

network.sortModules()

dataTrain = SupervisedDataSet(1, 1)  # input, target
dataTrain.addSample(1, 0.76)  # one (x, y) pair from y = sin(x)*sin(2*x); sin(1)*sin(2) ≈ 0.76

trainer = BackpropTrainer(network, dataTrain)  # backprop over our network and data
print(trainer.train())  # train() runs one epoch and returns the average error

print(network.params)  # the network's flattened weight vector
# print(network)
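With a single sample the net can only memorize one point; a hedged extension that samples the stated target function y = sin(x)*sin(2*x) over a range (the trainer sees the new samples because it holds a reference to dataTrain):

import math

for k in range(50):
    x = k / 50.0 * math.pi
    dataTrain.addSample(x, math.sin(x) * math.sin(2 * x))
print(trainer.train())  # error over the enlarged dataset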