from pybrain.structure import FeedForwardNetwork, FullConnection, LinearLayer


def buildSlicedNetwork():
    """ Build a network with sliced connections: each of the two input
    neurons is connected to the opposite output neuron, through its own
    single-weight slice of a FullConnection. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name='a')
    b = LinearLayer(2, name='b')
    N.addInputModule(a)
    N.addOutputModule(b)
    
    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
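A quick usage sketch, assuming PyBrain is installed: because of the crossed slices, each input neuron only reaches the opposite output neuron.

n = buildSlicedNetwork()
# input neuron 0 feeds only output neuron 1, so the first output stays 0
print(n.activate([1, 0]))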
Example #2
    def __init__(self, boardSize, convSize, numFeatureMaps, **args):
        inputdim = 2
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * boardSize * boardSize, name='in')
        self.addInputModule(inlayer)

        # we need some treatment of the border too - thus we pad the direct board input.
        x = convSize // 2  # integer division (convSize / 2 would be a float in Python 3)
        insize = boardSize + 2 * x
        if convSize % 2 == 0:
            insize -= 1
        paddedlayer = LinearLayer(inputdim * insize * insize, name='pad')
        self.addModule(paddedlayer)

        # we connect a bias to the padded-parts (with shared but trainable weights).
        bias = BiasUnit()
        self.addModule(bias)
        biasConn = MotherConnection(inputdim)

        paddable = []
        if convSize % 2 == 0:
            xs = list(range(x)) + list(range(insize - x + 1, insize))
        else:
            xs = list(range(x)) + list(range(insize - x, insize))
        paddable.extend(crossproduct([list(range(insize)), xs]))
        paddable.extend(crossproduct([xs, list(range(x, boardSize + x))]))

        for (i, j) in paddable:
            self.addConnection(
                SharedFullConnection(biasConn,
                                     bias,
                                     paddedlayer,
                                     outSliceFrom=(i * insize + j) * inputdim,
                                     outSliceTo=(i * insize + j + 1) *
                                     inputdim))

        for i in range(boardSize):
            inmod = ModuleSlice(inlayer,
                                outSliceFrom=i * boardSize * inputdim,
                                outSliceTo=(i + 1) * boardSize * inputdim)
            outmod = ModuleSlice(paddedlayer,
                                 inSliceFrom=((i + x) * insize + x) * inputdim,
                                 inSliceTo=((i + x) * insize + x + boardSize) *
                                 inputdim)
            self.addConnection(IdentityConnection(inmod, outmod))

        self._buildStructure(inputdim, insize, paddedlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
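A hedged usage sketch for the constructor above. The class name is an assumption here (PyBrain's custom networks define a ConvolutionalBoardNetwork with this signature):

# hypothetical class name; each board field carries inputdim = 2 values,
# passed in as one flat vector
net = ConvolutionalBoardNetwork(boardSize=5, convSize=3, numFeatureMaps=4)
board = [0.0] * (2 * 5 * 5)
out = net.activate(board)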
    def __init__(self, inputdim, insize, convSize, numFeatureMaps, **args):
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * insize * insize)
        self.addInputModule(inlayer)
        self._buildStructure(inputdim, insize, inlayer, convSize, numFeatureMaps)
        self.sortModules()
import scipy
from pybrain.structure import (FeedForwardNetwork, LinearLayer,
                               MotherConnection, SharedFullConnection)


def buildSharedCrossedNetwork():
    """ Build a network with shared connections: two hidden modules are
    symmetrically linked, but to a different input neuron than output
    neuron. The weights are fixed explicitly below (1 and 2). """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name='a')
    b = LinearLayer(h, name='b')
    c = LinearLayer(h, name='c')
    d = LinearLayer(2, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)

    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1, ))

    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2, ))

    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo=1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo=1))
    N.sortModules()
    return N
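Because each pair of SharedFullConnections reuses a single MotherConnection weight, the network crosses its two inputs and doubles them:

n = buildSharedCrossedNetwork()
# d[0] = 2 * a[1] and d[1] = 2 * a[0], so the inputs come out crossed:
print(n.activate([1, 2]))  # -> [4., 2.]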
    def __init__(self, predefined=None, **kwargs):
        """ For the current implementation, the sequence length 
        needs to be fixed, and given at construction time. """
        if predefined is not None:
            self.predefined = predefined
        else:
            self.predefined = {}
        FeedForwardNetwork.__init__(self, **kwargs)
        assert self.seqlen is not None

        # the input is a 1D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen, ), 'inmesh')

        # the output is also a 1D-mesh
        outmod = self.outcomponentclass(self.outputsize * self.seqlen,
                                        name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen, ),
                                             'outmesh')

        # the hidden layers are placed in a 2 x seqlen mesh
        hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass,
                                                    self.hiddensize,
                                                    (2, self.seqlen), 'hidden')

        # add the modules
        for c in inmesh:
            self.addInputModule(c)
        for c in outmesh:
            self.addOutputModule(c)
        for c in hiddenmesh:
            self.addModule(c)

        # set the connection weights to be shared
        inconnf = MotherConnection(inmesh.componentOutdim *
                                   hiddenmesh.componentIndim,
                                   name='inconn')
        outconnf = MotherConnection(outmesh.componentIndim *
                                    hiddenmesh.componentOutdim,
                                    name='outconn')
        forwardconn = MotherConnection(hiddenmesh.componentIndim *
                                       hiddenmesh.componentOutdim,
                                       name='fconn')
        if self.symmetric:
            backwardconn = forwardconn
            inconnb = inconnf
            outconnb = outconnf
        else:
            backwardconn = MotherConnection(hiddenmesh.componentIndim *
                                            hiddenmesh.componentOutdim,
                                            name='bconn')
            inconnb = MotherConnection(inmesh.componentOutdim *
                                       hiddenmesh.componentIndim,
                                       name='inconn')
            outconnb = MotherConnection(outmesh.componentIndim *
                                        hiddenmesh.componentOutdim,
                                        name='outconn')

        # build the connections
        for i in range(self.seqlen):
            # input to hidden
            self.addConnection(
                SharedFullConnection(inconnf, inmesh[(i, )],
                                     hiddenmesh[(0, i)]))
            self.addConnection(
                SharedFullConnection(inconnb, inmesh[(i, )],
                                     hiddenmesh[(1, i)]))
            # hidden to output
            self.addConnection(
                SharedFullConnection(outconnf, hiddenmesh[(0, i)],
                                     outmesh[(i, )]))
            self.addConnection(
                SharedFullConnection(outconnb, hiddenmesh[(1, i)],
                                     outmesh[(i, )]))
            if i > 0:
                # forward in time
                self.addConnection(
                    SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)],
                                         hiddenmesh[(0, i)]))
            if i < self.seqlen - 1:
                # backward in time
                self.addConnection(
                    SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)],
                                         hiddenmesh[(1, i)]))

        self.sortModules()
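A hedged usage sketch: this constructor matches PyBrain's BidirectionalNetwork, whose seqlen, inputsize, outputsize, hiddensize and symmetric attributes are set through the keyword arguments (the import path is an assumption):

from pybrain.structure.networks import BidirectionalNetwork

n = BidirectionalNetwork(seqlen=3, inputsize=1, outputsize=1,
                         hiddensize=2, symmetric=False)
print(n.activate([0.5, -0.2, 0.9]))  # one output value per sequence position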
Example #6
from itertools import chain

from pybrain.structure import (BiasUnit, FeedForwardNetwork, FullConnection,
                               IdentityConnection)


def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number
    of layers, each being connected to the next one with an
    IdentityConnection. Due to this, all layers of one tuple have to have the
    same dimension. We call these tuples 'parts'.

    Afterwards, the last layer of one tuple is connected to the first layer
    of the following tuple by a FullConnection.

    If the keyword argument `bias` is True, a BiasUnit is additionally
    connected, by another FullConnection, to every layer that receives a
    FullConnection.

    Example:
    
        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options.get('bias', False)

    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(next(layerParts))
    firstLayer = next(firstPart)
    net.addInputModule(firstLayer)

    prevLayer = firstLayer

    # Any remaining layers of the first part are linked to their predecessor
    # by IdentityConnections, just like the layers within any other part.
    for layer in firstPart:
        net.addModule(layer)
        net.addConnection(IdentityConnection(prevLayer, layer))
        prevLayer = layer

    for part in layerParts:
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick the connection class depending on whether we entered a
            # new part: parts are bridged by FullConnections.
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    # the last layer added is the network's output layer
    net.addOutputModule(prevLayer)
    net.sortModules()
    return net
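A usage sketch mirroring the docstring example, with bias units enabled (assuming the layer classes are exported by pybrain.structure, as in PyBrain):

from pybrain.structure import GaussianLayer, LinearLayer, SigmoidLayer

net = _buildNetwork(
    (LinearLayer(3),),
    (SigmoidLayer(4), GaussianLayer(4)),
    (SigmoidLayer(3),),
    bias=True,
)
print(net.activate([0.2, 0.4, 0.6]))  # 3-dimensional output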