def __init__(self, inputdim, insize, convSize, numFeatureMaps, **args):
     FeedForwardNetwork.__init__(self, **args)
     inlayer = LinearLayer(inputdim * insize * insize)
     self.addInputModule(inlayer)
     self._buildStructure(inputdim, insize, inlayer, convSize,
                          numFeatureMaps)
     self.sortModules()
Example #2
 def __init__(self, predefined = None, **kwargs):
     """ For the current implementation, the sequence length 
     needs to be fixed, and given at construction time. """
     if predefined is not None:
         self.predefined = predefined
     else:
         self.predefined = {}
     FeedForwardNetwork.__init__(self, **kwargs)
     assert self.seqlen is not None
     
     # the input is a 1D-mesh (as a view on a flat input layer)
     inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
     inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen,), 'inmesh')
     
     # the output is also a 1D-mesh 
     outmod = self.outcomponentclass(self.outputsize * self.seqlen, name='output')
     outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen,), 'outmesh')
     
     # the hidden layers are placed in a 2 x seqlen mesh
     hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hiddensize,
                                                 (2, self.seqlen), 'hidden')
     
     # add the modules
     for c in inmesh:
         self.addInputModule(c)
     for c in outmesh:
         self.addOutputModule(c)
     for c in hiddenmesh:
         self.addModule(c)
     
     # set the connection weights to be shared
     inconnf = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
     outconnf = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
     forwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='fconn')
     if self.symmetric:
         backwardconn = forwardconn
         inconnb = inconnf
         outconnb = outconnf
     else:
         backwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='bconn')
         inconnb = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
         outconnb = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
     
     # build the connections
     for i in range(self.seqlen):
         # input to hidden
         self.addConnection(SharedFullConnection(inconnf, inmesh[(i,)], hiddenmesh[(0, i)]))
         self.addConnection(SharedFullConnection(inconnb, inmesh[(i,)], hiddenmesh[(1, i)]))
         # hidden to output
         self.addConnection(SharedFullConnection(outconnf, hiddenmesh[(0, i)], outmesh[(i,)]))
         self.addConnection(SharedFullConnection(outconnb, hiddenmesh[(1, i)], outmesh[(i,)]))
         if i > 0:
             # forward in time
             self.addConnection(SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)], hiddenmesh[(0, i)]))
         if i < self.seqlen - 1:
             # backward in time
             self.addConnection(SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)], hiddenmesh[(1, i)]))
         
     self.sortModules()
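A hedged usage sketch for the constructor above, assuming it is PyBrain's BidirectionalNetwork (whose keyword arguments set the seqlen, inputsize, outputsize, hiddensize and symmetric attributes used in the code):

from pybrain.structure.networks.bidirectional import BidirectionalNetwork

# one flat input of inputsize * seqlen values, one output per time step
net = BidirectionalNetwork(seqlen=5, inputsize=1, outputsize=1,
                           hiddensize=10, symmetric=False)
print(net.activate([0.1, 0.2, 0.3, 0.4, 0.5]))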
Example #4
def createNet():
    net = FeedForwardNetwork()
    modules = add_modules(net)
    add_connections(net, modules)
    # finish up
    net.sortModules()
    gradientCheck(net)
    return net
Example #5
def createNet():
    net = FeedForwardNetwork()
    modules = add_modules(net)
    add_connections(net, modules)
    # finish up
    net.sortModules()
    #gradientCheck(net)
    return net
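The add_modules and add_connections helpers are not shown in these two examples; a minimal sketch of what they might look like (the layer sizes and wiring are assumptions, only the helper names come from the code above):

from pybrain.structure import LinearLayer, SigmoidLayer, FullConnection

def add_modules(net):
    # create the layers and register them with the network
    inp = LinearLayer(2, name='in')
    hidden = SigmoidLayer(3, name='hidden')
    out = LinearLayer(1, name='out')
    net.addInputModule(inp)
    net.addModule(hidden)
    net.addOutputModule(out)
    return {'in': inp, 'hidden': hidden, 'out': out}

def add_connections(net, modules):
    # fully connect in -> hidden -> out
    net.addConnection(FullConnection(modules['in'], modules['hidden']))
    net.addConnection(FullConnection(modules['hidden'], modules['out']))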
Example #6
def buildSlicedNetwork():
    """ build a network with shared connections. Two hiddne modules are symetrically linked, but to a different 
    input neuron than the output neuron. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name='a')
    b = LinearLayer(2, name='b')
    N.addInputModule(a)
    N.addOutputModule(b)

    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
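A quick check of the crossing, using the builder above: because of the slices, output 1 depends only on input 0 and output 0 only on input 1 (the weight values themselves are random):

net = buildSlicedNetwork()
print(net.activate([1.0, 0.0]))  # only output 1 is nonzero
print(net.activate([0.0, 1.0]))  # only output 0 is nonzero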
Example #7
    def __init__(self, boardSize, convSize, numFeatureMaps, **args):
        inputdim = 2
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * boardSize * boardSize, name='in')
        self.addInputModule(inlayer)

        # we need some treatment of the border too - thus we pad the direct board input.
        x = convSize / 2
        insize = boardSize + 2 * x
        if convSize % 2 == 0:
            insize -= 1
        paddedlayer = LinearLayer(inputdim * insize * insize, name='pad')
        self.addModule(paddedlayer)

        # we connect a bias to the padded-parts (with shared but trainable weights).
        bias = BiasUnit()
        self.addModule(bias)
        biasConn = MotherConnection(inputdim)

        paddable = []
        if convSize % 2 == 0:
            xs = range(x) + range(insize - x + 1, insize)
        else:
            xs = range(x) + range(insize - x, insize)
        paddable.extend(crossproduct([range(insize), xs]))
        paddable.extend(crossproduct([xs, range(x, boardSize + x)]))

        for (i, j) in paddable:
            self.addConnection(
                SharedFullConnection(biasConn,
                                     bias,
                                     paddedlayer,
                                     outSliceFrom=(i * insize + j) * inputdim,
                                     outSliceTo=(i * insize + j + 1) *
                                     inputdim))

        for i in range(boardSize):
            inmod = ModuleSlice(inlayer,
                                outSliceFrom=i * boardSize * inputdim,
                                outSliceTo=(i + 1) * boardSize * inputdim)
            outmod = ModuleSlice(paddedlayer,
                                 inSliceFrom=((i + x) * insize + x) * inputdim,
                                 inSliceTo=((i + x) * insize + x + boardSize) *
                                 inputdim)
            self.addConnection(IdentityConnection(inmod, outmod))

        self._buildStructure(inputdim, insize, paddedlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
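A worked example of the padding arithmetic above, assuming boardSize=5 and convSize=3:

# x = 3 / 2 = 1 (integer division) and insize = 5 + 2*1 = 7 (odd convSize,
# so no correction), giving a padded layer of 2 * 7 * 7 = 98 units around
# the 2 * 5 * 5 = 50 direct board inputs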
Example #11
    def __init__(self, x_dim, y_dim, hidden_size, s_id):
        self.serialize_id = s_id
        self.net = FeedForwardNetwork()

        in_layer = LinearLayer(x_dim)
        hidden_layer = SigmoidLayer(hidden_size)
        out_layer = LinearLayer(y_dim)
        self.net.addInputModule(in_layer)
        self.net.addModule(hidden_layer)
        self.net.addOutputModule(out_layer)

        in_to_hidden = FullConnection(in_layer, hidden_layer)
        hidden_to_out = FullConnection(hidden_layer, out_layer)
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        self.net.sortModules()
Example #12
def buildSharedCrossedNetwork():
    """ build a network with shared connections. Two hidden modules are
    symmetrically linked, but to a different input neuron than the
    output neuron. The weights are random. """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name = 'a')
    b = LinearLayer(h, name = 'b')
    c = LinearLayer(h, name = 'c')
    d = LinearLayer(2, name = 'd')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)

    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1,))

    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2,))

    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo = 1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom = 1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom = 1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo = 1))
    N.sortModules()
    return N
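Because the mother connections pin the shared weights (m1 = 1, m2 = 2), the output of this network is deterministic: each input is routed to the opposite output and doubled.

net = buildSharedCrossedNetwork()
print(net.activate([1.0, 3.0]))  # -> [6.0, 2.0]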
Example #14
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

#wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()

print(net)

trainer = BackpropTrainer(net, ds)

for i in range(20):
    for j in range(1000):
Example #15
    def buildBMTrainer(self):
        x, y = self.readexcel()
        # generate 'size' rows of simulated data:
        # self.writeexcel(size=100)
        # resx=contrib(x,0.9)
        # print '**********************'
        # print resx
        # x1=x[:,[3,4,5,6,7,8,9,10,11,0,1,2]]
        # resx1=contrib(x1)
        # print '**********************'
        # print resx1

        self.realy = y
        per = int(len(x))
        # normalize the data (normalization is essential when using Sigmoid)
        self.sx = MinMaxScaler()
        self.sy = MinMaxScaler()

        xTrain = x[:per]
        xTrain = self.sx.fit_transform(xTrain)
        yTrain = y[:per]
        yTrain = self.sy.fit_transform(yTrain)

        # initialize the feedforward network
        self.__fnn = FeedForwardNetwork()

        # build the input, hidden and output layers; 3-5 hidden layers is typical, more is unwise
        inLayer = LinearLayer(x.shape[1], 'inLayer')
        hiddenLayer0 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer0')
        hiddenLayer1 = TanhLayer(self.hiddendim, 'hiddenLayer1')
        hiddenLayer2 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer2')
        outLayer = LinearLayer(self.rescol, 'outLayer')

        # add the constructed input, hidden and output layers to the fnn
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer0)
        self.__fnn.addModule(hiddenLayer1)
        self.__fnn.addModule(hiddenLayer2)
        self.__fnn.addOutputModule(outLayer)

        # create full connections between the layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer0)
        hidden_to_hidden0 = FullConnection(hiddenLayer0, hiddenLayer1)
        hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
        hidden_to_out = FullConnection(hiddenLayer2, outLayer)

        # register the connections with the fnn
        self.__fnn.addConnection(in_to_hidden)
        self.__fnn.addConnection(hidden_to_hidden0)
        self.__fnn.addConnection(hidden_to_hidden1)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()
        # initialize the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # add the training data and labels to DS
        # for i in range(len(xTrain)):
        #     DS.addSample(xTrain[i], yTrain[i])
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # train with backpropagation until convergence, for at most 10000 epochs
        trainer = BMBackpropTrainer(self.__fnn,
                                    DS,
                                    learningrate=0.0001,
                                    verbose=self.verbose)
        if self.myalg:
            trainingErrors = trainer.bmtrain(maxEpochs=10000,
                                             verbose=True,
                                             continueEpochs=3000,
                                             totalError=0.0001)
        else:
            trainingErrors = trainer.trainUntilConvergence(
                maxEpochs=10000, continueEpochs=3000, validationProportion=0.1)
        # CV = CrossValidator(trainer, DS, n_folds=4, valfunc=ModuleValidator.MSE)
        # CV.validate()
        # CrossValidator
        # trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000,continueEpochs=5000, validationProportion=0.1)
        # self.finalError = trainingErrors[0][-2]
        # self.finalerror=trainingErrors[0][-2]
        # if (self.verbose):
        #     print 'final overall tolerance:', self.finalError
        self.__sy = self.sy
        self.__sx = self.sx
        for i in range(len(xTrain)):
            a = self.sy.inverse_transform(
                self.__fnn.activate(xTrain[i]).reshape(-1, 1))
            self.restest.append(
                self.sy.inverse_transform(
                    self.__fnn.activate(xTrain[i]).reshape(-1, 1))[0][0])
Example #16
def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due
    to this, all layers have to have the same dimension. We call these tuples
    'parts.'

    Afterwards, the last layer of one tuple is connected to the first layer of
    the following tuple by a FullConnection.

    If the keyword argument bias is given, BiasUnits are added additionally with
    every FullConnection.

    Example:

        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options['bias'] if 'bias' in options else False

    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(layerParts.next())
    firstLayer = firstPart.next()
    net.addInputModule(firstLayer)

    prevLayer = firstLayer

    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    net.addOutputModule(layer)
    net.sortModules()
    return net
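A hedged usage sketch for the helper above: two single-layer 'parts' joined by a FullConnection, with a BiasUnit added for the fully connected layer:

from pybrain.structure import LinearLayer, SigmoidLayer

net = _buildNetwork((LinearLayer(3),), (SigmoidLayer(3),), bias=True)
print(net.activate([0.1, 0.2, 0.3]))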
Example #17
def buildnet(modules):
    net = FeedForwardNetwork(name='mynet')
    net.addInputModule(modules['in'])
    net.addModule(modules['hidden'])
    net.addOutputModule(modules['out'])
    net.addModule(modules['bias'])
    net.addConnection(modules['in_to_hidden'])
    net.addConnection(modules['bias_to_hidden'])
    net.addConnection(modules['bias_to_out'])
    if ('hidden2' in modules):
        net.addModule(modules['hidden2'])
        net.addConnection(modules['hidden_to_hidden2'])
        net.addConnection(modules['bias_to_hidden2'])
        net.addConnection(modules['hidden2_to_out'])
    else:
        net.addConnection(modules['hidden_to_out'])
    net.sortModules()
    return net
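buildnet() expects a dict of pre-built modules and connections; a minimal sketch of that dict (the layer sizes are assumptions, the keys come from the code above):

from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit, FullConnection

modules = {
    'in': LinearLayer(2, name='in'),
    'hidden': SigmoidLayer(3, name='hidden'),
    'out': LinearLayer(1, name='out'),
    'bias': BiasUnit(name='bias'),
}
modules['in_to_hidden'] = FullConnection(modules['in'], modules['hidden'])
modules['bias_to_hidden'] = FullConnection(modules['bias'], modules['hidden'])
modules['bias_to_out'] = FullConnection(modules['bias'], modules['out'])
modules['hidden_to_out'] = FullConnection(modules['hidden'], modules['out'])
net = buildnet(modules)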
Example #18
    def __init__(self, states, verbose=False, max_epochs=None):
        '''Create a NeuralNetwork instance.

        `states` is a tuple of tuples of ints, representing the discovered subnetworks'
        entrez ids.
        '''
        self.verbose         = verbose
        self.max_epochs      = max_epochs
        self.num_features    = sum(map(lambda tup: len(tup), states))
        self.states          = states

        n = FeedForwardNetwork()
        n.addOutputModule(TanhLayer(1, name='out'))
        n.addModule(BiasUnit(name='bias out'))
        n.addConnection(FullConnection(n['bias out'], n['out']))

        for i, state in enumerate(states):
            dim = len(state)
            n.addInputModule(TanhLayer(dim, name='input %s' % i))
            n.addModule(BiasUnit(name='bias input %s' % i))
            n.addConnection(FullConnection(n['bias input %s' % i], n['input %s' % i]))
            n.addConnection(FullConnection(n['input %s' % i], n['out']))

        n.sortModules()
        self.n = n
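A hedged usage sketch, assuming the surrounding class is the NeuralNetwork named in the docstring: two subnetworks with three and two entrez ids give two Tanh input blocks of dimensions 3 and 2, each with its own bias, all feeding the single output unit:

nn = NeuralNetwork(states=((101, 102, 103), (201, 202)))
print(nn.n.activate([0.1, 0.2, 0.3, 0.4, 0.5]))  # 3 + 2 = 5 input values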
Example #19
def _build_network():
    logger.info("Building network...")

    net = FeedForwardNetwork()
    inp = LinearLayer(IMG_WIDTH * IMG_HEIGHT * 2)
    h1_image_width = IMG_WIDTH - FIRST_CONVOLUTION_FILTER + 1
    h1_image_height = IMG_HEIGHT - FIRST_CONVOLUTION_FILTER + 1
    h1_full_width = h1_image_width * CONVOLUTION_MULTIPLIER * NUMBER_OF_IMAGES
    h1_full_height = h1_image_height * CONVOLUTION_MULTIPLIER
    h1 = SigmoidLayer(h1_full_width * h1_full_height)

    h2_width = h1_full_width / 2
    h2_height = h1_full_height / 2
    h2 = LinearLayer(h2_width * h2_height)

    h3_image_width = h2_width / CONVOLUTION_MULTIPLIER / NUMBER_OF_IMAGES - SECOND_CONVOLUTION_FILTER + 1
    h3_image_height = h2_height / CONVOLUTION_MULTIPLIER - SECOND_CONVOLUTION_FILTER + 1
    h3_full_width = h3_image_width * (CONVOLUTION_MULTIPLIER * 2) * NUMBER_OF_IMAGES
    h3_full_height = h3_image_height * (CONVOLUTION_MULTIPLIER * 2)
    h3 = SigmoidLayer(h3_full_width * h3_full_height)

    h4_full_width = h3_image_width - MERGE_FILTER
    h4_full_height = h3_image_height - MERGE_FILTER
    h4 = SigmoidLayer(h4_full_width * h4_full_height)

    logger.info("BASE IMG: %d x %d" % (IMG_WIDTH, IMG_HEIGHT))
    logger.info("First layer IMG: %d x %d" % (h1_image_width, h1_image_height))
    logger.info("First layer FULL: %d x %d" % (h1_full_width, h1_full_height))
    logger.info("Second layer FULL: %d x %d" % (h2_width, h2_height))
    logger.info("Third layer IMG: %d x %d" % (h3_image_width, h3_image_height))
    logger.info("Third layer FULL: %d x %d" % (h3_full_width, h3_full_height))
    logger.info("Forth layer FULL: %d x %d" % (h3_image_width, h3_image_height))
    outp = SoftmaxLayer(2)

    h5 = SigmoidLayer(h4_full_width * h4_full_height)

    # add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addModule(h3)
    net.addModule(h4)
    net.addModule(h5)

    # create connections

    for i in range(NUMBER_OF_IMAGES):
        _add_convolutional_connection(
            net=net,
            h1=inp,
            h2=h1,
            filter_size=FIRST_CONVOLUTION_FILTER,
            multiplier=CONVOLUTION_MULTIPLIER,
            input_width=IMG_WIDTH * 2,
            input_height=IMG_HEIGHT,
            output_width=h1_full_width,
            output_height=h1_full_height,
            offset_x=h1_image_width * i,
            offset_y=0,
            size_x=h1_image_width,
            size_y=h1_image_height
        )

    _add_pool_connection(
        net=net,
        h1=h1,
        h2=h2,
        input_width=h1_full_width,
        input_height=h1_full_height
    )

    for i in range(NUMBER_OF_IMAGES * CONVOLUTION_MULTIPLIER):
        for j in range(CONVOLUTION_MULTIPLIER):
            _add_convolutional_connection(
                net=net,
                h1=h2,
                h2=h3,
                filter_size=SECOND_CONVOLUTION_FILTER,
                multiplier=CONVOLUTION_MULTIPLIER,
                input_width=h2_width,
                input_height=h2_height,
                output_width=h3_full_width,
                output_height=h3_full_height,
                offset_x=h3_image_width * i,
                offset_y=h3_image_height * j,
                size_x=h3_image_width,
                size_y=h3_image_height
            )

    _merge_connection(
        net=net,
        h1=h3,
        h2=h4,
        filter_size=MERGE_FILTER,
        input_width=h3_full_width,
        input_height=h3_full_height,
        output_width=h4_full_width,
        output_height=h4_full_height
    )

    net.addConnection(FullConnection(h4, h5))
    net.addConnection(FullConnection(h5, outp))

    # finish up
    net.sortModules()
    logger.info("Done building network")
    return net
Example #20
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.networks.recurrent import RecurrentNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.customxml import NetworkWriter

import handWrittenRecognition
import MNIST_Data
# create an Object to get the data source
dataObject = MNIST_Data.MNIST_Processing()
traininglist = dataObject.neural_data_set
traininglabels = dataObject.neural_label_set

# step1
#create neural network
fnn = FeedForwardNetwork()

#set three layers, input+ hidden layer+ output  28*28=784

# the first feature extraction
#inLayer = LinearLayer(784,name='inLayer')
# the second feature extraction
inLayer = LinearLayer(28, name='inLayer')
hiddenLayer = SigmoidLayer(30, name='hiddenLayer0')
outLayer = LinearLayer(10, name='outLayer')

#There are a couple of different classes of layers. For a complete list check out the modules package.

#add these three Layers into neural network
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer)
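#the snippet is cut off here - a hedged completion, following the pattern of
#the other examples in this listing (assumes FullConnection was imported in
#the elided part of the file)
fnn.addOutputModule(outLayer)
fnn.addConnection(FullConnection(inLayer, hiddenLayer))
fnn.addConnection(FullConnection(hiddenLayer, outLayer))
fnn.sortModules()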
Example #21
def main():
    a = 0
    for i in range(0, 100):
        inLayer = SigmoidLayer(2)
        hiddenLayer = SigmoidLayer(3)
        outLayer = SigmoidLayer(1)

        net = FeedForwardNetwork()
        net.addInputModule(inLayer)
        net.addModule(hiddenLayer)
        net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        net.addConnection(in_to_hidden)
        net.addConnection(hidden_to_out)

        net.sortModules()

        ds = SupervisedDataSet(2, 1)
        ds.addSample((1, 1), (0))
        ds.addSample((1, 0), (1))
        ds.addSample((0, 1), (1))
        ds.addSample((0, 0), (0))

        trainer = BackpropTrainer(net, ds)
        trainer.trainUntilConvergence()

        out = net.activate((1, 1))
        if (out < 0.5):
            a = a + 1
    print(str(a) + "/100")
Example #22
from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer

network = FeedForwardNetwork()  # create network
inputLayer = SigmoidLayer(1)  # maybe LinearLayer ?
hiddenLayer = SigmoidLayer(4)
outputLayer = SigmoidLayer(1)  # maybe LinearLayer ?

network.addInputModule(inputLayer)
network.addModule(hiddenLayer)
network.addOutputModule(outputLayer)
# Connection
network.addConnection(FullConnection(inputLayer, hiddenLayer))
network.addConnection(FullConnection(hiddenLayer, outputLayer))

network.sortModules()

dataTrain = SupervisedDataSet(1, 1)  # input, target
dataTrain.addSample(
    1, 0.76
)  # one (input, target) pair: x and y = sin(x)*sin(2*x)

trainer = BackpropTrainer(
    network, dataTrain)  # it's back prop, we use our network and our data
print(trainer.train())  # the training error after one epoch

print(network.params)  # the trained weights
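A single sample cannot teach the target function; a sketch of a fuller training set, assuming we fit sin(x)*sin(2*x) on [0, pi]:

import math

dataTrain = SupervisedDataSet(1, 1)
for i in range(100):
    x = i / 100.0 * math.pi
    dataTrain.addSample(x, math.sin(x) * math.sin(2 * x))

trainer = BackpropTrainer(network, dataTrain)
for epoch in range(100):
    trainer.train()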
Example #23
 def buildIris(self):
     self.params['dataset'] = 'iris'
     self.trn_data, self.tst_data = pybrainData(0.5)
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(4, name='in')
     hiddenLayer = TanhLayer(6, name='hidden0')
     outLayer = ThresholdLayer(3, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Example #24
 def buildParity(self):
     self.params['dataset'] = 'parity'
     self.trn_data = ParityDataSet(nsamples=75)
     self.trn_data.setField('class', self.trn_data['target'])
     self.tst_data = ParityDataSet(nsamples=75)
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(4, name='in')
     hiddenLayer = TanhLayer(6, name='hidden0')
     outLayer = ThresholdLayer(1, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Example #25
 def buildXor(self):
     self.params['dataset'] = 'XOR'
     d = ClassificationDataSet(2)
     d.addSample([0., 0.], [0.])
     d.addSample([0., 1.], [1.])
     d.addSample([1., 0.], [1.])
     d.addSample([1., 1.], [0.])
     d.setField('class', [[0.], [1.], [1.], [0.]])
     self.trn_data = d
     self.tst_data = d
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(2, name='in')
     hiddenLayer = TanhLayer(3, name='hidden0')
     outLayer = ThresholdLayer(1, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
Example #26
class Network:
    "NETwhisperer neural network"
        
    def phoneme_to_layer(self, phoneme):
        return self.phonemes_to_layers[phoneme]

    def layer_to_phoneme(self, layer):
        def cos_to_input(item):
            phoneme, phoneme_layer = item
            return _cos(layer,phoneme_layer)
        # minimum angle should be maximum cos    
        return max(self.phonemes_to_layers.iteritems(), key=cos_to_input)[0]    

    def __init__(self, window_size, window_middle, n_hidden_neurons):
        self.window_size = window_size
        self.window_middle = window_middle
        self.n_hidden_neurons = n_hidden_neurons
        self.n_trainings = 0
        self.training_errors = []
        self._init_layers()
        self._generate_pybrain_network()
        
    def _init_layers(self):
        # one neuron for each window/letter combination
        self.letter_neuron_names = list(product(range(self.window_size), corpus.all_letters))
        # one neuron for each phoneme trait
        self.phoneme_trait_neuron_names = list(corpus.all_phoneme_traits)
        # neuron counts
        self.n_input_neurons = len(self.letter_neuron_names)
        self.n_output_neurons = len(self.phoneme_trait_neuron_names)        
        # mapping from (pos, letter) to input neuron index
        self.letters_to_neurons = dict({(pos_and_letter, index) for index, pos_and_letter in enumerate(self.letter_neuron_names)})
        # mapping from trait to neuron
        self.traits_to_neurons = dict({(trait, index) for index, trait in enumerate(self.phoneme_trait_neuron_names)})
        # mapping from phoneme to layer
        self.phonemes_to_layers = {}
        for (phoneme, traits) in corpus.phoneme_traits.iteritems():
            layer = zeros(self.n_output_neurons)
            for trait in traits:
                index = self.traits_to_neurons[trait]
                layer[index] = 1
            self.phonemes_to_layers[phoneme] = layer
            
    def _generate_pybrain_network(self):
        # make network
        self._pybrain_network = FeedForwardNetwork()
        # make layers
        self._in_layer = LinearLayer(self.n_input_neurons, name='in')
        self._hidden_layer = SigmoidLayer(self.n_hidden_neurons, name='hidden')
        self._out_layer = LinearLayer(self.n_output_neurons, name='out')
        self._bias_neuron = BiasUnit(name='bias')
        # make connections between layers
        self._in_hidden_connection = FullConnection(self._in_layer, self._hidden_layer)
        self._hidden_out_connection = FullConnection(self._hidden_layer, self._out_layer)
        self._bias_hidden_connection = FullConnection(self._bias_neuron, self._hidden_layer)
        self._bias_out_connection = FullConnection(self._bias_neuron, self._out_layer)
        # add modules to network
        self._pybrain_network.addInputModule(self._in_layer)
        self._pybrain_network.addModule(self._hidden_layer)
        self._pybrain_network.addOutputModule(self._out_layer)
        self._pybrain_network.addModule(self._bias_neuron)
        # add connections to network
        for c in (self._in_hidden_connection, self._hidden_out_connection, self._bias_hidden_connection, self._bias_out_connection):
            self._pybrain_network.addConnection(c)
        # initialize network with added modules/connections
        self._pybrain_network.sortModules()

    def windowIter(self, letters):
        assert type(letters) == str
        padding_before = ' ' * self.window_middle
        padding_after = ' ' * (self.window_size - self.window_middle - 1)
        padded_letters = padding_before + letters + padding_after
        # for each letter in the sample
        for l_num in range(len(letters)):
            letters_window = padded_letters[l_num:l_num+self.window_size]
            yield letters_window    
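    # For example, with window_size=3 and window_middle=1, windowIter('cat')
    # pads the string to ' cat ' and yields ' ca', 'cat', 'at '.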

    def generateSamples(self, letters, phonemes):
        assert len(letters) == len(phonemes)
        for (letters_window, current_phoneme) in izip(self.windowIter(letters), phonemes):
            yield self.letters_to_layer(letters_window), self.phoneme_to_layer(current_phoneme)

    def letters_to_layer(self, letters):
        assert len(letters) == self.window_size
        # start with empty layer
        layer = zeros(self.n_input_neurons)
        # loop through letters and activate each neuron
        for (pos, letter) in enumerate(letters):
            index = self.letters_to_neurons[(pos, letter)]
            layer[index] = 1
        return layer
        
    def train(self, training_set, n_epochs=1, callback=None):
        # build dataset
        dataset = DataSet(self.n_input_neurons, self.n_output_neurons)
        for (ltr,ph) in training_set:
            for sample in self.generateSamples(ltr,ph):
                dataset.addSample(*sample)
        # build trainer
        trainer = Trainer(self._pybrain_network, dataset, 0.01, 1.0, 0.9)
        for i in xrange(n_epochs):
            # run callback if present
            if callback: callback()
            # train network
            error = trainer.train()
            # record training errors
            self.n_trainings = self.n_trainings + 1
            self.training_errors.append(error)
            
    def getInputHiddenWeights(self):
        return self._in_hidden_connection.params.reshape((self.n_hidden_neurons, self.n_input_neurons))
        
    def getHiddenOutputWeights(self):
        return self._hidden_out_connection.params.reshape((self.n_output_neurons, self.n_hidden_neurons))

    def getHiddenThresholds(self):
        return self._bias_hidden_connection.params
        
    def getOutputThresholds(self):
        return self._bias_out_connection.params
        
    def lettersToPhonemesWithAngles(self, letters, expected_phonemes):
        for (window, exp_ph) in izip(self.windowIter(letters), expected_phonemes):
            input_layer = self.letters_to_layer(window)
            output_layer = self._pybrain_network.activate(input_layer)
            phoneme = self.layer_to_phoneme(output_layer)
            angle = _angle(output_layer, self.phoneme_to_layer(exp_ph))
            yield (phoneme, angle)

    def lettersToPhonemes(self, letters):
        for window in self.windowIter(letters):
            input_layer = self.letters_to_layer(window)
            output_layer = self._pybrain_network.activate(input_layer)
            phoneme = self.layer_to_phoneme(output_layer)
            yield phoneme
    
    def addRandomWeights(self, rand_fn):
         cons = (self._in_hidden_connection, self._hidden_out_connection)
         for c in cons:
             for i in xrange(len(c.params)):
                c.params[i] += rand_fn()
Example #28
def custom_build_network(layer_sizes):
    net = FeedForwardNetwork()

    layers = []
    inp = SigmoidLayer(layer_sizes[0], name='visible')
    h1 = SigmoidLayer(layer_sizes[1], name='hidden1')
    h2 = SigmoidLayer(layer_sizes[2], name='hidden2')
    out = SigmoidLayer(layer_sizes[3], name='out')
    bias = BiasUnit(name='bias')

    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addOutputModule(out)
    net.addModule(bias)

    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, h2))
    net.addConnection(FullConnection(h2, out))

    net.addConnection(FullConnection(bias, h1))
    net.addConnection(FullConnection(bias, h2))
    net.addConnection(FullConnection(bias, out))

    net.sortModules()
    return net
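A usage sketch for the builder above, with assumed layer sizes of 4-3-3-1:

net = custom_build_network([4, 3, 3, 1])
print(net.activate([0.5, 0.2, 0.1, 0.9]))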
Example #29
from pybrain.supervised.trainers.backprop import BackpropTrainer
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.biasunit import BiasUnit

# Next we transform the data into a vectorized format so that it can be used as a training set
aramdata = open("ARAMData.txt","r")

#ChampionDictionary holds all the riot static data about each champion. The Riot IDs are the keys of the dictionary
championdictionary = DatabaseActions.CreateChampionDictionary()

#Creates a Neural Network of Appropriate size
predictionNet = FeedForwardNetwork()

inLayer = LinearLayer(len(championdictionary))
hiddenLayer = SigmoidLayer(5)
outLayer = SigmoidLayer(1)

predictionNet.addInputModule(inLayer)
predictionNet.addModule(hiddenLayer)
predictionNet.addOutputModule(outLayer)
predictionNet.addModule(BiasUnit(name = 'bias'))

in_to_hidden = FullConnection(inLayer,hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer,outLayer)

predictionNet.addConnection(in_to_hidden)
predictionNet.addConnection(hidden_to_out)
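# the snippet ends before the network is finalized - sortModules() would still
# be required before training or activating predictionNet (hedged completion)
predictionNet.sortModules()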
Example #30
class MLP:

    data = SupervisedDataSet
    net = FeedForwardNetwork

    def generate_training_set(self):
        random.seed()
        ind = floor(empty((2000,4)))
        outd = floor(empty((2000, 2)))

        res = array((ind,outd))

        print ind
        print
        print outd
        print
        print res

        for i in range(2000):
            n = random.getrandbits(1)
            if n == 0:
                a = random.randint(0,100)
                b = random.randint(0,100)
                c = random.randint(100,5000)
                d = random.randint(100,5000)
                res[0][i][0] = a
                res[0][i][1] = b
                res[0][i][2] = c
                res[0][i][3] = d

                res[1][i][0] = 0
                res[1][i][1] = 1

            else:
                a = random.randint(100,5000)
                b = random.randint(100,5000)
                c = random.randint(0,100)
                d = random.randint(0,100)
                res[0][i][0] = a
                res[0][i][1] = b
                res[0][i][2] = c
                res[0][i][3] = d

                res[1][i][0] = 1
                res[1][i][1] = 0

        for i in range(2000):
            print res[0][i][0],res[0][i][1],res[0][i][2],res[0][i][3], " out", res[1][i][0],res[1][i][1]
        return res

    def getFullDataSet(self):
        res = zeros((50**4, 4))
        a = 0
        b = 0
        c = 0
        d = 0
        for i in range(len(res)):
            if (a % 50 == 0):
                a = 0
            a = a + 1
            if (i % 2 == 0):
                if (b % 50 == 0):
                    b = 0
                b = b + 1

            if (i % 4 == 0):
                if (c % 50 == 0):
                    c = 0
                c = c + 1
            if (i % 8 ==0):
                if (d % 50 == 0):
                    d = 0
                d = d + 1
            res[i][0] = a
            res[i][1] = b
            res[i][2] = c
            res[i][3] = d

        res += 75

        return res

    def make_dataset(self):
        """
        Creates a set of training data with 4-dimensional input and 2-dimensional output.
        This is what the dataset should look like.
        """
        self.data = SupervisedDataSet(4,2)

        self.data.addSample((1,1,150,150),(0,1))
        self.data.addSample((1,1,199,142),(0,1))
        self.data.addSample((150,120,43,12),(1,0))
        self.data.addSample((198,123,54,65),(1,0))

        return self.data


    def training(self,d):
        """
        Builds a network, trains it, and returns it.
        """

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(4) # 4 inputs
        hiddenLayer = SigmoidLayer(3) # 3 neurons on hidden layer with sigmoid function
        outLayer = LinearLayer(2) # 2 neuron as output layer


        "add layers to NN"
        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        "create connections"
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        "add connections"
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        "some unknown but necessary function :)"
        self.net.sortModules()

        print self.net

        "generate big sized training set"
        trainingSet = SupervisedDataSet(4,2)

        trainArr = self.generate_training_set()
        for ri in range(2000):
            input = ((trainArr[0][ri][0],trainArr[0][ri][1],trainArr[0][ri][2],trainArr[0][ri][3]))
            target = ((trainArr[1][ri][0],trainArr[1][ri][1]))
            trainingSet.addSample(input, target)

        "create backpropogation trainer"
        t = BackpropTrainer(self.net,d,learningrate=0.00001, momentum=0.99)
        while True:
            globErr = t.train()
            print "global error:", globErr
            if globErr < 0.0001:
                break

        return self.net


    def test(self,trained):
        """
        Builds a new test dataset and tests the trained network on it.
        """

        testArr = self.generate_training_set()
        for i in range(2000):
            print floor(testArr[0][i]),floor(testArr[1][i])


    def exportWeights(self, fileName):
        fileObject = open(fileName, 'w')
        pickle.dump(self.net, fileObject)
        fileObject.close()

    def importWeights(self, fileName):
        fileObject = open(fileName, 'r')
        self.net = pickle.load(fileObject)
        fileObject.close()
        return self.net

    def run(self):

        import __root__

        """
        Use this function to run build, train, and test your neural network.
        """

        trained = self.importWeights(__root__.path()+'/res/weights')
        # self.test(trained)
        # return
        import matplotlib.pyplot as plt

        value = 150
        plt.figure(1)
        plt.title("["+str(value)+",50"+",x,"+"y"+"]")
        for i in range(50,500, 5):
            print i
            for j in range(50, 500, 5):
                color = 'black'

                if np.around(trained.activate([value,50,i,j]))[0] == np.float32(1.0):
                    color = 'red'
                else:
                    color = 'blue'

                x = i
                y = j
                plt.scatter(x,y,c=color,s = 20, label = color, alpha=0.9, edgecolor = 'none')
        plt.grid(True)

        plt.figure(2)
        plt.title("["+str(value)+",100"+",x,"+"y"+"]")
        for i in range(50,500, 5):
            print i
            for j in range(50, 500, 5):
                color = 'black'

                if np.around(trained.activate([value,100,i,j]))[0] == np.float32(1.0):
                    color = 'red'
                else:
                    color = 'blue'

                x = i
                y = j
                plt.scatter(x,y,c=color,s = 20, label = color, alpha=0.9, edgecolor = 'none')
        plt.grid(True)

        plt.figure(3)
        plt.title("["+str(value)+",150"+",x,"+"y"+"]")
        for i in range(50,500, 5):
            print i
            for j in range(50, 500, 5):
                color = 'black'

                if np.around(trained.activate([value,150,i,j]))[0] == np.float32(1.0):
                    color = 'red'
                else:
                    color = 'blue'

                x = i
                y = j
                plt.scatter(x,y,c=color,s = 20, label = color, alpha=0.9, edgecolor = 'none')
        plt.grid(True)

        plt.show()
Example #35
    def buildTDnetwork(self):
        # create network and modules
        net = FeedForwardNetwork()
        inp = LinearLayer(self.n_input, name="Input")
        h1 = SigmoidLayer(10, name='sigm')
        outp = LinearLayer(1, name='output')
        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)
        # create connections from input
        net.addConnection(FullConnection(inp, h1, name="input_LSTM"))

        # create connections to output
        net.addConnection(FullConnection(h1, outp, name="LSTM_outp"))

        # finish up
        net.sortModules()
        net.randomize()

        return net
Example #37
import pybrain
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer
from pybrain.datasets.supervised import SupervisedDataSet
from BinReader import BinReader
from pybrain.utilities import percentError
from pybrain.datasets.classification import ClassificationDataSet
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.tools.xml.networkwriter import NetworkWriter

dim = 381
n = FeedForwardNetwork()
inLayer = LinearLayer(dim)
hiddenLayer = SigmoidLayer(100)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

in_to_hidden = FullConnection(inLayer,hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer,outLayer)

n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

n.sortModules()
Example #38
class BMTrainer:
    # number of hidden-layer neurons:
    # hiddendim = 3
    # # source file with the training data:
    # srcname = 'trainer.xlsx'
    # # file for storing the training data:
    # destname = 'buildBMTrainer.xml'
    # number of result columns in the source file (number of output-layer nodes)
    # rescol = 1
    # whether to print the intermediate iterations of the computation
    # verbose = True
    # # overall tolerance
    # finalerror = 0
    # # restest = []
    # __fnn = None
    # __sy = None

    def __init__(self,
                 _hiddendim=3,
                 _srcname='trainer.xlsx',
                 _destxls='trainerdest.xls',
                 _destname='buildBMTrainer'):
        self.hiddendim = _hiddendim
        self.srcname = _srcnmae
        self.destxls = _destxls
        self.destname = _destname
        self.restest = []
        self.rescol = 1
        self.verbose = True
        # overall tolerance
        self.finalerror = 0
        self.__fnn = None
        self.__sy = None
        self.__sx = None
        self.realy = None
        self.weights = []
        self.srcx = []
        self.srcy = []
        self.destx = []
        self.desty = []
        self.sx = None
        self.sy = None
        self.myalg = True
        self.npin = 0

    # read the training data from the Excel source file
    def readexcel(self):
        workbook = xlrd.open_workbook(self.srcname)
        sheet1 = workbook.sheet_by_index(0)
        if self.verbose:
            print('Training set: ' + str(sheet1.nrows) + ' rows, ' +
                  str(sheet1.ncols) + ' columns; of which ' +
                  str(self.rescol) + ' are result columns')
        self.srcx = []
        self.srcy = []
        if sheet1.nrows > 1 and sheet1.ncols > self.rescol:
            self.srcx = np.zeros(
                (sheet1.nrows - 1, sheet1.ncols - self.rescol), dtype=float)
            self.srcy = np.zeros((sheet1.nrows - 1, self.rescol),
                                 dtype=float)
            for i in range(sheet1.nrows - 1):
                for j in range(sheet1.ncols):
                    if j < sheet1.ncols - self.rescol:
                        self.srcx[i][j] = sheet1.cell(i + 1, j).value
                    else:
                        self.srcy[i][j - sheet1.ncols +
                                     self.rescol] = sheet1.cell(i + 1, j).value
        return self.srcx.copy(), self.srcy.copy()

    def writeexcel(self, x=None, size=0, savexls=''):
        if x is None:
            x = np.array(self.srcx).copy()
        if savexls == '':
            savexls = self.destxls
        if size > 0:
            workbook = xlwt.Workbook()
            worksheet = workbook.add_sheet('dest')
            self.destx = np.zeros((size, len(x[0])), dtype=float)
            # number of simulated data rows:
            for i in range(size):
                for j in range(len(x[0])):
                    cellval = round(random.uniform(min(x[:, j]), max(x[:, j])),
                                    3)
                    self.destx[i][j] = cellval
                    worksheet.write(i, j, cellval)
            workbook.save(savexls)

    def testdest(self):
        # load the test data:
        workbook = xlrd.open_workbook(self.destxls)
        sheet1 = workbook.sheet_by_index(0)
        workbookw1 = xlucopy(workbook)
        sheetw1 = workbookw1.get_sheet(0)
        self.destx = np.zeros((sheet1.nrows, sheet1.ncols), dtype=float)
        for i in range(sheet1.nrows):
            for j in range(sheet1.ncols):
                self.destx[i][j] = sheet1.cell(i, j).value
        destx1 = self.sx.transform(self.destx)
        for i in range(sheet1.nrows):
            testy = self.sy.inverse_transform(
                self.__fnn.activate(destx1[i]).reshape(-1, 1))
            self.desty.append(testy)
            sheetw1.write(i, sheet1.ncols, testy[0][0])
        workbookw1.save(self.destxls)
        maxy = max(self.srcy)
        miny = min(self.srcy)

        pmax = []
        pmin = []
        for i in range(sheet1.nrows):
            pmax.append(maxy)
            pmin.append(miny)
        plt.figure()
        plt.subplot(121)
        plt.plot(np.arange(0, sheet1.nrows),
                 pmax,
                 label='max',
                 color='r',
                 linestyle='--')
        plt.plot(np.arange(0, sheet1.nrows),
                 np.array(self.desty).reshape(-1, 1),
                 label='test',
                 color='b',
                 linestyle=':',
                 marker='|')
        plt.plot(np.arange(0, sheet1.nrows),
                 pmin,
                 label='min',
                 color='k',
                 linestyle='--')
        plt.legend()
        plt.xlabel("PointCount")
        plt.ylabel("Rate")
        print('###################################')
        npmax = [i for i in self.desty if i > pmax[0]]
        npin = [i for i in self.desty if (i < pmax[0] and i > pmin[0])]
        npmin = [i for i in self.desty if i < pmin[0]]
        print(str(float(len(npmin)) / len(self.desty) * 100) +
              '% below ' + str(pmin[0]))
        self.npin = float(len(npin)) / len(self.desty) * 100
        print(str(float(len(npin)) / len(self.desty) * 100) +
              '% inside the interval [' + str(pmin[0]) + ',' +
              str(pmax[0]) + ']')
        print(str(float(len(npmax)) / len(self.desty) * 100) +
              '% above ' + str(pmax[0]))
        print('###################################')
        # plt.show()

    def buildBMTrainer(self):
        x, y = self.readexcel()
        # to simulate `size` rows of data:
        # self.writeexcel(size=100)

        self.realy = y
        per = len(x)
        # normalise the data (inputs should generally be normalised when
        # sigmoid units are used)
        self.sx = MinMaxScaler()
        self.sy = MinMaxScaler()

        xTrain = x[:per]
        xTrain = self.sx.fit_transform(xTrain)
        yTrain = y[:per]
        yTrain = self.sy.fit_transform(yTrain)

        # initialise the feed-forward network
        self.__fnn = FeedForwardNetwork()

        # build the input, hidden and output layers; usually no more than
        # 3-5 hidden layers are needed
        inLayer = LinearLayer(x.shape[1], 'inLayer')
        hiddenLayer0 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer0')
        hiddenLayer1 = TanhLayer(self.hiddendim, 'hiddenLayer1')
        hiddenLayer2 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer2')
        outLayer = LinearLayer(self.rescol, 'outLayer')

        # add the input, hidden and output layers to the network
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer0)
        self.__fnn.addModule(hiddenLayer1)
        self.__fnn.addModule(hiddenLayer2)
        self.__fnn.addOutputModule(outLayer)

        # fully connect consecutive layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer0)
        hidden_to_hidden0 = FullConnection(hiddenLayer0, hiddenLayer1)
        hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
        hidden_to_out = FullConnection(hiddenLayer2, outLayer)

        # register the connections with the network
        self.__fnn.addConnection(in_to_hidden)
        self.__fnn.addConnection(hidden_to_hidden0)
        self.__fnn.addConnection(hidden_to_hidden1)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()
        # initialise the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # add the training samples and targets to the dataset
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # train with backpropagation until convergence (at most 10000 epochs)
        trainer = BMBackpropTrainer(self.__fnn,
                                    DS,
                                    learningrate=0.0001,
                                    verbose=self.verbose)
        if self.myalg:
            trainingErrors = trainer.bmtrain(maxEpochs=10000,
                                             verbose=True,
                                             continueEpochs=3000,
                                             totalError=0.0001)
        else:
            trainingErrors = trainer.trainUntilConvergence(
                maxEpochs=10000, continueEpochs=3000, validationProportion=0.1)
        self.__sy = self.sy
        self.__sx = self.sx
        for i in range(len(xTrain)):
            self.restest.append(
                self.sy.inverse_transform(
                    self.__fnn.activate(xTrain[i]).reshape(-1, 1))[0][0])

    def saveresult(self, destname=None):
        if destname is None:
            destname = self.destname
        NetworkWriter.writeToFile(self.__fnn, destname + '.xml')
        joblib.dump(self.__sy, destname + '_sy.pkl', compress=3)
        joblib.dump(self.__sx, destname + '_sx.pkl', compress=3)

        # to load the saved network and scalers back:
        # fnn = NetworkReader.readFrom('BM.xml')
        # sx = joblib.load('sx.pkl')
        # sy = joblib.load('sy.pkl')

    def printresult(self):
        for mod in self.__fnn.modules:
            print("Module:", mod.name)
            if mod.paramdim > 0:
                print("--parameters:", mod.params)
            for conn in self.__fnn.connections[mod]:
                print("-connection to", conn.outmod.name)
                if conn.paramdim > 0:
                    print("- parameters", conn.params)

        # recurrent connections only exist on recurrent networks, so check
        # once after the module loop rather than per module
        if hasattr(self.__fnn, "recurrentConns"):
            print("Recurrent connections")
            for conn in self.__fnn.recurrentConns:
                print("-", conn.inmod.name, " to", conn.outmod.name)
                if conn.paramdim > 0:
                    print("- parameters", conn.params)

    def getweight(self):
        self.weights = []
        for mod in self.__fnn.modules:
            for conn in self.__fnn.connections[mod]:
                print("-connection to", conn.outmod.name)
                if (conn.paramdim > 0) and (conn.inmod.name == 'inLayer'):
                    weights1 = conn.params.reshape(conn.indim, conn.outdim)
                    for pw in weights1:
                        dw = 0.0
                        for pw1 in pw:
                            dw += fabs(pw1)
                        self.weights.append(dw)
                    print('weights:', str(self.weights))
                    print("- parameters", conn.params)
        sw = MinMaxScaler()
        sw = sw.fit_transform(
            np.asarray(self.weights, dtype=float).reshape(-1, 1))
        print('sw:', str(sw))

    def printpilt(self, y, realy, savepng='', show=True):
        # plt.figure()
        plt.subplot(122)
        plt.plot(np.arange(0, len(y)), y, 'ro--', label='predict number')
        plt.plot(np.arange(0, len(y)), realy, 'ko-', label='true number')
        plt.legend()
        plt.xlabel("PointCount")
        plt.ylabel("Rate")
        if savepng != '':
            plt.savefig(savepng + '.png')
        if show:
            plt.show()
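
# Hedged end-to-end sketch for BMTrainer (assumes a 'trainer.xlsx' with one
# result column exists in the working directory, as the defaults expect):
bt = BMTrainer(_hiddendim=6, _srcname='trainer.xlsx')
bt.buildBMTrainer()
bt.saveresult()
bt.getweight()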
# (the dataset construction for this example was truncated in the source; a
# minimal reconstruction so the fragment runs -- the remaining samples are
# unknown and left out)
ds = SupervisedDataSet(2, 1)
ds.addSample((1, 1), (0,))

for input, target in ds:
    print(input, target)
    
#define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

#wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()

print(net)

trainer = BackpropTrainer(net, ds)

for i in range(20):
    for j in range(1000):
        # the loop body was truncated in the source; a single backprop step
        # per iteration is the most plausible intent
        trainer.train()
Example #40
    bias_to_out = FullConnection(biasUnit, outLayer)

    tosave = [inLayer, hiddenLayer, outLayer, biasUnit, in_to_hidden, hidden_to_out, bias_to_hidden, bias_to_out]

    return tosave


if len(sys.argv) <= 3:
    saved = buildNet()
else:
    saved = pickle.load(open(sys.argv[3], "rb"))

pickle.dump(saved, open("pablosemptynet.p", "wb"))


net = FeedForwardNetwork(name='mynet')

net.addInputModule(saved[0])
net.addModule(saved[1])
net.addOutputModule(saved[2])
net.addModule(saved[3])
net.addConnection(saved[4])
net.addConnection(saved[5])
net.addConnection(saved[6])
net.addConnection(saved[7])

net.sortModules()

trainer = BackpropTrainer(net, None, learningrate=lrate, verbose=False, batchlearning=True, weightdecay=wdecay)
stressErrors = []
phonemeErrors = []
def buildSubsamplingNetwork():
    """ Builds a network with subsampling connections. """
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(6, 'in'))
    n.addOutputModule(LinearLayer(1, 'out'))
    n.addConnection(SubsamplingConnection(n['in'], n['out'], inSliceTo=4))
    n.addConnection(SubsamplingConnection(n['in'], n['out'], inSliceFrom=4))
    n.sortModules()
    return n
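
# A quick check of the slicing (a sketch, not from the source): the first
# SubsamplingConnection sees input units 0-3, the second units 4-5, and both
# feed the single output unit.
n = buildSubsamplingNetwork()
print(n.activate([1., 2., 3., 4., 5., 6.]))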
Example #42
class PyBrainANNs:
    def __init__(self, x_dim, y_dim, hidden_size, s_id):
        self.serialize_id = s_id
        self.net = FeedForwardNetwork()

        in_layer = LinearLayer(x_dim)
        hidden_layer = SigmoidLayer(hidden_size)
        out_layer = LinearLayer(y_dim)
        self.net.addInputModule(in_layer)
        self.net.addModule(hidden_layer)
        self.net.addOutputModule(out_layer)

        in_to_hidden = FullConnection(in_layer, hidden_layer)
        hidden_to_out = FullConnection(hidden_layer, out_layer)
        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)

        self.net.sortModules()

    def _prepare_dataset(self, x_data, y_data):
        assert x_data.shape[0] == y_data.shape[0]

        if len(y_data.shape) == 1:
            y_matrix = np.matrix(y_data).T
        else:
            y_matrix = y_data.values

        assert x_data.shape[1] == self.net.indim
        assert y_matrix.shape[1] == self.net.outdim

        data_set = SupervisedDataSet(self.net.indim, self.net.outdim)
        data_set.setField("input", x_data)
        data_set.setField("target", y_matrix)

        return data_set

    def train(self, x_data, y_data):
        trainer = BackpropTrainer(self.net, self._prepare_dataset(x_data, y_data))
        trainer.train()

    def score(self, x_data, y_datas):
        return ModuleValidator.validate(regression_score, self.net, self._prepare_dataset(x_data, y_datas))

    def predict(self, x_data):
        return np.array([self.net.activate(sample) for sample in x_data])

    def save(self, path):
        joblib.dump(self.net, path)

    def load(self, path):
        self.net = joblib.load(path)
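
# Hedged usage sketch for PyBrainANNs on random data (shapes only; the real
# features and targets come from elsewhere in the project):
rng = np.random.RandomState(0)
model = PyBrainANNs(x_dim=3, y_dim=1, hidden_size=5, s_id='demo')
X, y = rng.rand(20, 3), rng.rand(20)
model.train(X, y)
print(model.predict(X[:2]).shape)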
 def __init__(self, **args):
     FeedForwardNetwork.__init__(self, **args)
 def buildParity(self):
     self.params['dataset'] = 'parity'
     self.trn_data = ParityDataSet(nsamples=75)
     self.trn_data.setField('class', self.trn_data['target'])
     self.tst_data = ParityDataSet(nsamples=75)
     global trn_data
     trn_data = self.trn_data
     nn = FeedForwardNetwork()
     inLayer = TanhLayer(4, name='in')
     hiddenLayer = TanhLayer(6, name='hidden0')
     outLayer = ThresholdLayer(1, name='out')
     nn.addInputModule(inLayer)
     nn.addModule(hiddenLayer)
     nn.addOutputModule(outLayer)
     in_to_hidden = FullConnection(inLayer, hiddenLayer)
     hidden_to_out = FullConnection(hiddenLayer, outLayer)
     nn.addConnection(in_to_hidden)
     nn.addConnection(hidden_to_out)
     nn.sortModules()
     nn.randomize()
     self.net_settings = str(nn.connections)
     self.nn = nn
def createNN():
	nn = FeedForwardNetwork()
	inLayer = TanhLayer(4, name='in')
	hiddenLayer = TanhLayer(6, name='hidden0')
	outLayer = ThresholdLayer(3)
	nn.addInputModule(inLayer)
	nn.addModule(hiddenLayer)
	nn.addOutputModule(outLayer)
	in_to_hidden = FullConnection(inLayer, hiddenLayer)
	hidden_to_out = FullConnection(hiddenLayer, outLayer)
	nn.addConnection(in_to_hidden)
	nn.addConnection(hidden_to_out)
	nn.sortModules()
	return nn
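
# Minimal sketch: the net above is built untrained; activating it yields
# binary outputs from the ThresholdLayer.
nn = createNN()
print(nn.activate([0.5, -0.2, 0.1, 0.9]))  # 4 inputs -> 3 threshold outputs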
def buildSharedCrossedNetwork():
    """ Build a network with shared connections. Two hidden modules are
    symmetrically linked, but to a different input neuron than the output
    neuron. The mother-connection weights are fixed below, so the behaviour
    is predictable. """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name='a')
    b = LinearLayer(h, name='b')
    c = LinearLayer(h, name='c')
    d = LinearLayer(2, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)

    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1, ))

    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2, ))

    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo=1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo=1))
    N.sortModules()
    return N
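
# Sanity check, following directly from the fixed mother-connection weights:
# with m1 = 1 and m2 = 2, input (x1, x2) is crossed and scaled, so activation
# should return (2*x2, 2*x1).
n = buildSharedCrossedNetwork()
print(n.activate([3.0, 5.0]))  # expected: [10.0, 6.0]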