Example #1
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            # not converging: restart training from freshly initialized weights
            return trainedRNN()
    # exportRNN(n)
    draw_connections(n)

    return n
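draw_connections, getDatasetFromFile and root are project-specific helpers not shown here. Given those, a minimal usage sketch of the returned net (input values made up for illustration):

n = trainedRNN()
n.reset()  # clear the recurrent state before a new sequence
print(n.activate([0.5, 0.1, 0.2, 0.9]))  # one 4-dimensional input step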
Example #2
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #3
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('Successfully converted to fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
Example #4
    def __init__(self, indim, outdim, hiddim=6):
        Module.__init__(self, indim, outdim)

        self._network = Network()
        self._in_layer = LinearLayer(indim + outdim)
        self._hid_layer = LSTMLayer(hiddim)
        self._out_layer = LinearLayer(outdim)
        self._bias = BiasUnit()

        self._network.addInputModule(self._in_layer)
        self._network.addModule(self._hid_layer)
        self._network.addModule(self._bias)
        self._network.addOutputModule(self._out_layer)

        self._hid_to_out_connection = FullConnection(self._hid_layer,
                                                     self._out_layer)
        self._in_to_hid_connection = FullConnection(self._in_layer,
                                                    self._hid_layer)
        self._network.addConnection(self._hid_to_out_connection)
        self._network.addConnection(self._in_to_hid_connection)
        self._network.addConnection(FullConnection(self._bias,
                                                   self._hid_layer))

        self._network.sortModules()

        self.offset = self._network.offset
        self.backprojectionFactor = 0.01
Example #5
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
Example #6
def importCatDogANN(fileName=root.path() + "/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()
    # NOTE: the fileName argument is currently unused; weights come from a fixed path
    params = np.load(root.path() + '/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
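exportCatDogANN appears in Example #3 but is not shown; a minimal counterpart sketch that would produce the file this importer reads back (np.save adds the .npy suffix seen in the load path):

def exportCatDogANN(n):
    # persist the network's flat parameter vector
    np.save(root.path() + '/res/cat_dog_params.txt', n.params)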
Example #7
    def __init__(self, boardSize, convSize, numFeatureMaps, **args):
        inputdim = 2
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * boardSize * boardSize, name='in')
        self.addInputModule(inlayer)

        # we need some treatment of the border too - thus we pad the direct board input.
        x = convSize // 2  # integer half-width of the filter
        insize = boardSize + 2 * x
        if convSize % 2 == 0:
            insize -= 1
        paddedlayer = LinearLayer(inputdim * insize * insize, name='pad')
        self.addModule(paddedlayer)

        # we connect a bias to the padded-parts (with shared but trainable weights).
        bias = BiasUnit()
        self.addModule(bias)
        biasConn = MotherConnection(inputdim)

        paddable = []
        if convSize % 2 == 0:
            xs = list(range(x)) + list(range(insize - x + 1, insize))
        else:
            xs = list(range(x)) + list(range(insize - x, insize))
        paddable.extend(crossproduct([range(insize), xs]))
        paddable.extend(crossproduct([xs, range(x, boardSize + x)]))

        for (i, j) in paddable:
            self.addConnection(
                SharedFullConnection(biasConn,
                                     bias,
                                     paddedlayer,
                                     outSliceFrom=(i * insize + j) * inputdim,
                                     outSliceTo=(i * insize + j + 1) *
                                     inputdim))

        for i in range(boardSize):
            inmod = ModuleSlice(inlayer,
                                outSliceFrom=i * boardSize * inputdim,
                                outSliceTo=(i + 1) * boardSize * inputdim)
            outmod = ModuleSlice(paddedlayer,
                                 inSliceFrom=((i + x) * insize + x) * inputdim,
                                 inSliceTo=((i + x) * insize + x + boardSize) *
                                 inputdim)
            self.addConnection(IdentityConnection(inmod, outmod))

        self._buildStructure(inputdim, insize, paddedlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
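The border-padding arithmetic above is easiest to check with concrete numbers; a minimal sketch (board and filter sizes made up for illustration):

def padded_size(boardSize, convSize):
    x = convSize // 2
    insize = boardSize + 2 * x
    if convSize % 2 == 0:
        insize -= 1  # even filters need one fewer padded row/column
    return insize

print(padded_size(9, 3))  # 11: one ring of padding around a 9x9 board
print(padded_size(9, 4))  # 12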
Example #8
def trained3ONN():
    n = FeedForwardNetwork()

    inp = LinearLayer(176850, name='input')
    hid = LinearLayer(3, name='hidden')
    out = LinearLayer(1, name='output')

    #add modules
    n.addOutputModule(out)
    n.addInputModule(inp)
    n.addModule(hid)

    #add connections
    n.addConnection(FullConnection(inp, hid, inSliceTo=100, outSliceTo=1))
    n.addConnection(
        FullConnection(inp,
                       hid,
                       inSliceFrom=100,
                       inSliceTo=5150,
                       outSliceFrom=1,
                       outSliceTo=2))
    n.addConnection(FullConnection(inp, hid, inSliceFrom=5150, outSliceFrom=2))
    n.addConnection(FullConnection(hid, out))

    n.sortModules()
    print "Network created"
    d = load3OrderDataSet()
    print "Data loaded"
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs
    print "Learning started"
    count = 0
    while True:
        globErr = t.train()
        print("iteration #%d error = %f" % (count, globErr))
        if globErr < 0.01:
            break
        count += 1
        # if (count == 100):
        #     break

    # for i in range(100):
    #     print t.train()

    exportANN(n)

    return n
Example #9
def add_modules(net):
    modules = {}

    #define modules
    modules['inp'] = LinearLayer(400)
    modules["input_bias"] = BiasUnit()
    modules['h1'] = TanhLayer(300)
    modules['h1_bias'] = BiasUnit()
    modules['h2'] = TanhLayer(200)
    modules['h2_bias'] = BiasUnit()
    #modules['h3'] = neurons.euclideanDistance(100)
    modules['outp'] = SoftmaxLayer(2)
    modules['output_bias'] = BiasUnit()

    # add modules
    net.addInputModule(modules['inp'])
    net.addOutputModule(modules['outp'])
    net.addModule(modules['h1'])
    net.addModule(modules['h2'])
    net.addModule(modules['input_bias'])
    net.addModule(modules['h1_bias'])
    net.addModule(modules['h2_bias'])
    net.addModule(modules['output_bias'])
    #net.addModule(modules['h3'])

    return modules
Example #10
    def __init__(self, inputdim, insize, convSize, numFeatureMaps, **args):
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * insize * insize)
        self.addInputModule(inlayer)
        self._buildStructure(inputdim, insize, inlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
Example #11
def buildSimpleLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(100, name='i')
    h = LSTMLayer(10, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))  # renamed: 'r1' was used twice
    N.sortModules()
    return N
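A minimal usage sketch for the builder above (the 100-dimensional inputs are placeholder values):

net = buildSimpleLSTMNetwork(peepholes=True)
net.reset()  # clear the LSTM state before each new sequence
for step in range(5):
    out = net.activate([0.0] * 100)  # feed one time step
print(out)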
Example #12
    def __init__(self, predefined=None, **kwargs):
        """ For the current implementation, the sequence length
        needs to be fixed, and given at construction time. """
        if predefined is not None:
            self.predefined = predefined
        else:
            self.predefined = {}
        FeedForwardNetwork.__init__(self, **kwargs)
        assert self.seqlen is not None

        # the input is a 1D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen,), 'inmesh')

        # the output is also a 1D-mesh
        outmod = self.outcomponentclass(self.outputsize * self.seqlen, name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen,), 'outmesh')

        # the hidden layers are placed in a 2 x seqlen mesh
        hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hiddensize,
                                                    (2, self.seqlen), 'hidden')

        # add the modules
        for c in inmesh:
            self.addInputModule(c)
        for c in outmesh:
            self.addOutputModule(c)
        for c in hiddenmesh:
            self.addModule(c)

        # set the connection weights to be shared
        inconnf = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
        outconnf = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
        forwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='fconn')
        if self.symmetric:
            backwardconn = forwardconn
            inconnb = inconnf
            outconnb = outconnf
        else:
            backwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='bconn')
            inconnb = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconnb')
            outconnb = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconnb')

        # build the connections
        for i in range(self.seqlen):
            # input to hidden
            self.addConnection(SharedFullConnection(inconnf, inmesh[(i,)], hiddenmesh[(0, i)]))
            self.addConnection(SharedFullConnection(inconnb, inmesh[(i,)], hiddenmesh[(1, i)]))
            # hidden to output
            self.addConnection(SharedFullConnection(outconnf, hiddenmesh[(0, i)], outmesh[(i,)]))
            self.addConnection(SharedFullConnection(outconnb, hiddenmesh[(1, i)], outmesh[(i,)]))
            if i > 0:
                # forward in time
                self.addConnection(SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)], hiddenmesh[(0, i)]))
            if i < self.seqlen - 1:
                # backward in time
                self.addConnection(SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)], hiddenmesh[(1, i)]))

        self.sortModules()
Example #13
def add_modules(net):
    modules = {}

    #define modules
    modules['inp'] = LinearLayer(40)
    modules['h1'] = reluLayer(20)
    modules['outp'] = SoftmaxLayer(2)

    # add modules
    net.addInputModule(modules['inp'])
    net.addOutputModule(modules['outp'])
    net.addModule(modules['h1'])

    return modules
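add_modules only registers the layers; the caller still has to wire and sort the net. A minimal follow-up sketch (FullConnection and FeedForwardNetwork assumed imported, as in the other examples):

net = FeedForwardNetwork()
modules = add_modules(net)
net.addConnection(FullConnection(modules['inp'], modules['h1']))
net.addConnection(FullConnection(modules['h1'], modules['outp']))
net.sortModules()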
Example #14
    def __init__(self, outdim, hiddim=15):
        """ Create an EvolinoNetwork with for sequences of dimension outdim and
        hiddim dimension of the RNN Layer."""
        indim = 0
        Module.__init__(self, indim, outdim)

        self._network = RecurrentNetwork()
        self._in_layer = LinearLayer(indim + outdim)
        self._hid_layer = LSTMLayer(hiddim)
        self._out_layer = LinearLayer(outdim)
        self._bias = BiasUnit()

        self._network.addInputModule(self._in_layer)
        self._network.addModule(self._hid_layer)
        self._network.addModule(self._bias)
        self._network.addOutputModule(self._out_layer)

        self._in_to_hid_connection = FullConnection(self._in_layer,
                                                    self._hid_layer)
        self._bias_to_hid_connection = FullConnection(self._bias,
                                                      self._hid_layer)
        self._hid_to_out_connection = FullConnection(self._hid_layer,
                                                     self._out_layer)
        self._network.addConnection(self._in_to_hid_connection)
        self._network.addConnection(self._bias_to_hid_connection)
        self._network.addConnection(self._hid_to_out_connection)

        self._recurrent_connection = FullConnection(self._hid_layer,
                                                    self._hid_layer)
        self._network.addRecurrentConnection(self._recurrent_connection)

        self._network.sortModules()
        self._network.reset()

        self.offset = self._network.offset
        self.backprojectionFactor = 0.01
Example #15
from pybrain.structure import FeedForwardNetwork
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.biasunit import BiasUnit
from pybrain.structure.connections.full import FullConnection

rede = FeedForwardNetwork()
camadaEntrada = LinearLayer(2)
camadaOculta = SigmoidLayer(3)
camadaSaida = SigmoidLayer(1)
bias1 = BiasUnit()
bias2 = BiasUnit()

rede.addInputModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addOutputModule(camadaSaida)
rede.addModule(bias1)
rede.addModule(bias2)

entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)
biasOculta = FullConnection(bias1, camadaOculta)
biasSaida = FullConnection(bias2, camadaSaida)

# the connections must be registered with the net before sorting
rede.addConnection(entradaOculta)
rede.addConnection(ocultaSaida)
rede.addConnection(biasOculta)
rede.addConnection(biasSaida)

rede.sortModules()

print(rede)
print(entradaOculta.params)
print(ocultaSaida.params)
print(biasOculta.params)
print(biasSaida.params)
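The net above (2 inputs, 1 output) is shaped for a problem like XOR; a minimal training sketch, assuming the standard PyBrain trainer and dataset classes:

from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.supervised.trainers.backprop import BackpropTrainer

ds = SupervisedDataSet(2, 1)
ds.addSample((0, 0), (0,))
ds.addSample((0, 1), (1,))
ds.addSample((1, 0), (1,))
ds.addSample((1, 1), (0,))

trainer = BackpropTrainer(rede, ds, learningrate=0.1)
for epoch in range(1000):
    trainer.train()  # one epoch over the dataset; returns the average error
print(rede.activate((1, 0)))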
Example #16
import MNIST_Data
# create an Object to get the data source
dataObject = MNIST_Data.MNIST_Processing()
traininglist = dataObject.neural_data_set
traininglabels = dataObject.neural_label_set

# step1
#create neural network
fnn = FeedForwardNetwork()

#set three layers: input + hidden + output (28*28 = 784)

# the first feature extraction
#inLayer = LinearLayer(784,name='inLayer')
# the second feature extraction
inLayer = LinearLayer(28, name='inLayer')
hiddenLayer = SigmoidLayer(30, name='hiddenLayer0')
outLayer = LinearLayer(10, name='outLayer')

#There are a couple of different classes of layers. For a complete list check out the modules package.

#add these three Layers into neural network
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer)
fnn.addOutputModule(outLayer)

#create the connections between three layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

#add connections into network
fnn.addConnection(in_to_hidden)
fnn.addConnection(hidden_to_out)
fnn.sortModules()
Example #17
    print(identifier, net.activate((0, 0)), net.activate((0, 1)),
          net.activate((1, 0)), net.activate((1, 1)))


ds = SupervisedDataSet(2, 1)

ds.addSample((0, 0), (0, ))
ds.addSample((0, 1), (1, ))
ds.addSample((1, 0), (1, ))
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

#wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()
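With the wiring complete, a minimal training sketch (assuming BackpropTrainer is imported from pybrain.supervised.trainers):

trainer = BackpropTrainer(net, ds)
for epoch in range(1000):
    trainer.train()  # one pass over the XOR samples; returns the average error
print(net.activate((1, 1)))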
Example #18
    def buildBMTrainer(self):
        x, y = self.readexcel()
        # simulate `size` rows of data:
        # self.writeexcel(size=100)
        # resx=contrib(x,0.9)
        # print '**********************'
        # print resx
        # x1=x[:,[3,4,5,6,7,8,9,10,11,0,1,2]]
        # resx1=contrib(x1)
        # print '**********************'
        # print resx1

        self.realy = y
        per = int(len(x))
        # normalize the data (when using sigmoid units, normalization is essentially mandatory)
        self.sx = MinMaxScaler()
        self.sy = MinMaxScaler()

        xTrain = x[:per]
        xTrain = self.sx.fit_transform(xTrain)
        yTrain = y[:per]
        yTrain = self.sy.fit_transform(yTrain)

        # initialize the feedforward network
        self.__fnn = FeedForwardNetwork()

        # build the input, hidden, and output layers (usually 3-5 hidden layers; avoid more)
        inLayer = LinearLayer(x.shape[1], 'inLayer')
        hiddenLayer0 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer0')
        hiddenLayer1 = TanhLayer(self.hiddendim, 'hiddenLayer1')
        hiddenLayer2 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer2')
        outLayer = LinearLayer(self.rescol, 'outLayer')

        # add the input, hidden, and output layers to the fnn
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer0)
        self.__fnn.addModule(hiddenLayer1)
        self.__fnn.addModule(hiddenLayer2)
        self.__fnn.addOutputModule(outLayer)

        # fully connect consecutive layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer0)
        hidden_to_hidden0 = FullConnection(hiddenLayer0, hiddenLayer1)
        hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
        hidden_to_out = FullConnection(hiddenLayer2, outLayer)

        # register the connections with the fnn
        self.__fnn.addConnection(in_to_hidden)
        self.__fnn.addConnection(hidden_to_hidden0)
        self.__fnn.addConnection(hidden_to_hidden1)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()
        # initialize the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # add the training samples and labels to DS
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # train with backprop until convergence, for at most 10000 epochs
        trainer = BMBackpropTrainer(self.__fnn,
                                    DS,
                                    learningrate=0.0001,
                                    verbose=self.verbose)
        if self.myalg:
            trainingErrors = trainer.bmtrain(maxEpochs=10000,
                                             verbose=True,
                                             continueEpochs=3000,
                                             totalError=0.0001)
        else:
            trainingErrors = trainer.trainUntilConvergence(
                maxEpochs=10000, continueEpochs=3000, validationProportion=0.1)
        # CV = CrossValidator(trainer, DS, n_folds=4, valfunc=ModuleValidator.MSE)
        # CV.validate()
        # CrossValidator
        # trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000,continueEpochs=5000, validationProportion=0.1)
        # self.finalError = trainingErrors[0][-2]
        # self.finalerror=trainingErrors[0][-2]
        # if (self.verbose):
        #     print 'final overall error:', self.finalError
        self.__sy = self.sy
        self.__sx = self.sx
        for i in range(len(xTrain)):
            # activate once and un-scale the prediction back to the original range
            a = self.sy.inverse_transform(
                self.__fnn.activate(xTrain[i]).reshape(-1, 1))
            self.restest.append(a[0][0])
Example #19
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.biasunit import BiasUnit

# Next we transform the data into a vectorized format so that it can be used as a training set
aramdata = open("ARAMData.txt","r")

#ChampionDictionary holds all the riot static data about each champion. The Riot IDs are the keys of the dictionary
championdictionary = DatabaseActions.CreateChampionDictionary()

#Creates a Neural Network of Appropriate size
predictionNet = FeedForwardNetwork()

inLayer = LinearLayer(len(championdictionary))
hiddenLayer = SigmoidLayer(5)
outLayer = SigmoidLayer(1)

predictionNet.addInputModule(inLayer)
predictionNet.addModule(hiddenLayer)
predictionNet.addOutputModule(outLayer)
predictionNet.addModule(BiasUnit(name='bias'))

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

predictionNet.addConnection(in_to_hidden)
predictionNet.addConnection(hidden_to_out)
predictionNet.addConnection(FullConnection(predictionNet['bias'], hiddenLayer))
predictionNet.addConnection(FullConnection(predictionNet['bias'], outLayer))
predictionNet.sortModules()
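A minimal training sketch for the net above, using placeholder samples (real inputs would be hot-encoded team compositions of length len(championdictionary)):

import random
from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.supervised.trainers.backprop import BackpropTrainer

ds = SupervisedDataSet(len(championdictionary), 1)
for _ in range(10):  # hypothetical placeholder samples
    comp = [random.randint(0, 1) for _ in range(len(championdictionary))]
    ds.addSample(comp, (random.randint(0, 1),))
trainer = BackpropTrainer(predictionNet, ds, learningrate=0.01)
trainer.trainEpochs(5)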
Example #20
    print "Doing final selection"
    finalAverages = []
    for individual in finalSelection:
        net._setParameters(individual)
        scores = []
        for i in range(100):
            sc = game.runGame(play)
            scores.append(sc)
        finalAverages.append(sum(scores) / len(scores))

    fittestIndividual = finalSelection[finalAverages.index(max(finalAverages))]
    return fittestIndividual


net = FeedForwardNetwork()
inLayer = LinearLayer(16)
hiddenLayer = SigmoidLayer(10)
outLayer = SigmoidLayer(2)
net.addInputModule(inLayer)
net.addModule(hiddenLayer)
net.addOutputModule(outLayer)
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
net.addConnection(in_to_hidden)
net.addConnection(hidden_to_out)
net.sortModules()
'''learning parameters'''
populationSize = 100  # number of individuals
selectedIndividualSize = populationSize // 5  # individuals carried over to the next generation
gamesPerIndividual = 30  # games per individual for estimating fitness (10-50 max.)
generationCount = 500  # number of generations
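The evolutionary loop these parameters drive is not shown; a minimal mutation sketch over the net's flat parameter vector (numpy assumed; fitness would come from the game-specific runGame used above):

import numpy as np

def mutate(individual, sigma=0.1):
    # perturb a copy of the flat weight vector with Gaussian noise
    return individual + np.random.normal(0, sigma, individual.shape)

parent = net.params.copy()
child = mutate(parent)
net._setParameters(child)  # load the child weights before evaluating fitness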
Example #21
    print(identifier, net.activate((0, 0)), net.activate((0, 1)),
          net.activate((1, 0)), net.activate((1, 1)))


ds = SupervisedDataSet(2, 1)

ds.addSample((0, 0), (0, ))
ds.addSample((0, 1), (1, ))
ds.addSample((1, 0), (1, ))
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define net
net = RecurrentNetwork()
net.addInputModule(LinearLayer(2, name="il"))
net.addModule(SigmoidLayer(4, name="h1"))
net.addModule(SigmoidLayer(4, name="h2"))
net.addOutputModule(LinearLayer(1, name="ol"))
c1 = FullConnection(net["il"], net["h1"])
c2 = FullConnection(net["h1"], net["h2"])
c3 = FullConnection(net["h2"], net["ol"])
cr1 = FullConnection(net["h1"], net["h1"])
net.addConnection(c1)
net.addConnection(c2)
net.addConnection(c3)
net.addRecurrentConnection(cr1)
net.sortModules()

print(net)
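Because h1 feeds back onto itself, this net is stateful: activations depend on earlier inputs. A minimal activation sketch:

net.reset()  # clear the recurrent history before starting a new sequence
print(net.activate((0, 1)))  # first step
print(net.activate((0, 1)))  # same input, possibly different output: state persists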
Example #22
import pybrain
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer
from pybrain.datasets.supervised import SupervisedDataSet
from BinReader import BinReader
from pybrain.utilities import percentError
from pybrain.datasets.classification import ClassificationDataSet
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.tools.xml.networkwriter import NetworkWriter

dim = 381
n = FeedForwardNetwork()
inLayer = LinearLayer(dim)
hiddenLayer = SigmoidLayer(100)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

n.sortModules()
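NetworkWriter is imported above but never used in this fragment; a minimal save/restore sketch (NetworkReader is the matching class in pybrain.tools.xml.networkreader):

from pybrain.tools.xml.networkreader import NetworkReader

NetworkWriter.writeToFile(n, 'trainedNet.xml')  # serialize the sorted network to XML
n2 = NetworkReader.readFrom('trainedNet.xml')   # reconstruct an equivalent network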