Example #1
    # (method of a FeedForwardNetwork subclass; the constructor in Example #2
    # below calls it to build the convolutional part of the network)
    def _buildStructure(self, inputdim, insize, inlayer, convSize, numFeatureMaps):
        # build layers: a valid convolution with a convSize x convSize window
        # shrinks the insize x insize grid to outdim x outdim
        outdim = insize - convSize + 1
        hlayer = TanhLayer(outdim * outdim * numFeatureMaps, name='h')
        self.addModule(hlayer)

        outlayer = SigmoidLayer(outdim * outdim, name='out')
        self.addOutputModule(outlayer)

        # build shared weights: one MotherConnection per row of the conv
        # window, plus one mapping each feature-map stack to its output unit
        convConns = []
        for i in range(convSize):
            convConns.append(MotherConnection(convSize * numFeatureMaps * inputdim, name='conv' + str(i)))
        outConn = MotherConnection(numFeatureMaps)

        # establish the connections.
        for i in range(outdim):
            for j in range(outdim):
                offset = i * outdim + j
                outmod = ModuleSlice(hlayer, inSliceFrom=offset * numFeatureMaps, inSliceTo=(offset + 1) * numFeatureMaps,
                                     outSliceFrom=offset * numFeatureMaps, outSliceTo=(offset + 1) * numFeatureMaps)
                self.addConnection(SharedFullConnection(outConn, outmod, outlayer, outSliceFrom=offset, outSliceTo=offset + 1))

                # row k of the window: connect a convSize-wide strip of the
                # input (weights shared via convConns[k]) to this feature-map slice
                for k, mc in enumerate(convConns):
                    offset = insize * (i + k) + j
                    inmod = ModuleSlice(inlayer, outSliceFrom=offset * inputdim, outSliceTo=offset * inputdim + convSize * inputdim)
                    self.addConnection(SharedFullConnection(mc, inmod, outmod))
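
A quick sanity check of the size arithmetic above (a sketch only; the numbers are illustrative, and inputdim=2 follows Example #2):

    insize, convSize, numFeatureMaps, inputdim = 7, 3, 4, 2
    outdim = insize - convSize + 1               # 5x5 valid-convolution grid
    print(outdim * outdim * numFeatureMaps)      # 100 units in layer 'h'
    print(outdim * outdim)                       # 25 units in layer 'out'
    # each conv MotherConnection holds one window-row's shared weights:
    print(convSize * numFeatureMaps * inputdim)  # 24 parameters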
Example #2
    def __init__(self, boardSize, convSize, numFeatureMaps, **args):
        inputdim = 2
        FeedForwardNetwork.__init__(self, **args)
        inlayer = LinearLayer(inputdim * boardSize * boardSize, name='in')
        self.addInputModule(inlayer)

        # the border needs some treatment too - thus we pad the direct board input.
        x = convSize // 2
        insize = boardSize + 2 * x
        if convSize % 2 == 0:
            insize -= 1
        paddedlayer = LinearLayer(inputdim * insize * insize, name='pad')
        self.addModule(paddedlayer)

        # we connect a bias to the padded-parts (with shared but trainable weights).
        bias = BiasUnit()
        self.addModule(bias)
        biasConn = MotherConnection(inputdim)

        paddable = []
        if convSize % 2 == 0:
            xs = list(range(x)) + list(range(insize - x + 1, insize))
        else:
            xs = list(range(x)) + list(range(insize - x, insize))
        # collect all (row, col) cells of the padded border frame
        paddable.extend(crossproduct([list(range(insize)), xs]))
        paddable.extend(crossproduct([xs, list(range(x, boardSize + x))]))

        for (i, j) in paddable:
            self.addConnection(
                SharedFullConnection(biasConn,
                                     bias,
                                     paddedlayer,
                                     outSliceFrom=(i * insize + j) * inputdim,
                                     outSliceTo=(i * insize + j + 1) *
                                     inputdim))

        # copy each real board row into the centre of the padded layer
        for i in range(boardSize):
            inmod = ModuleSlice(inlayer,
                                outSliceFrom=i * boardSize * inputdim,
                                outSliceTo=(i + 1) * boardSize * inputdim)
            outmod = ModuleSlice(paddedlayer,
                                 inSliceFrom=((i + x) * insize + x) * inputdim,
                                 inSliceTo=((i + x) * insize + x + boardSize) *
                                 inputdim)
            self.addConnection(IdentityConnection(inmod, outmod))

        self._buildStructure(inputdim, insize, paddedlayer, convSize,
                             numFeatureMaps)
        self.sortModules()
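
With _buildStructure from Example #1 in place, the constructor yields a runnable network. A minimal usage sketch (the import path is an assumption based on PyBrain's layout; board and window sizes are illustrative):

    from numpy import ones
    from pybrain.structure.networks.custom.convboard import ConvolutionalBoardNetwork

    net = ConvolutionalBoardNetwork(boardSize=5, convSize=3, numFeatureMaps=4)
    out = net.activate(ones(2 * 5 * 5))  # inputdim is fixed to 2 values per cell
    print(len(out))                      # 25: one sigmoid output per board position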
Example #3
 def slicer():
     # nested helper: `layer` and `dimensions` come from the enclosing scope.
     # Python 3 note: `reduce` must be imported from functools, and the
     # per-slice sizes need integer division.
     nbunits = reduce(lambda x, y: x * y, dimensions, 1)
     insize = layer.indim // nbunits
     outsize = layer.outdim // nbunits
     for index in range(nbunits):
         yield ModuleSlice(layer, insize * index, insize * (index + 1),
                           outsize * index, outsize * (index + 1))
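
For context, a self-contained sketch of what such a generator yields: ModuleSlice's positional arguments are inSliceFrom, inSliceTo, outSliceFrom and outSliceTo, so the slices below are non-overlapping views on one flat layer (layer size and mesh shape are illustrative):

    from functools import reduce
    from pybrain.structure import LinearLayer
    from pybrain.structure.moduleslice import ModuleSlice

    layer = LinearLayer(12, name='flat')
    dimensions = (3, 4)                  # view the 12 units as a 3x4 mesh
    nbunits = reduce(lambda x, y: x * y, dimensions, 1)
    insize = layer.indim // nbunits      # one input unit per slice
    outsize = layer.outdim // nbunits    # one output unit per slice
    parts = [ModuleSlice(layer, insize * i, insize * (i + 1),
                         outsize * i, outsize * (i + 1)) for i in range(nbunits)]
    print(len(parts))                    # 12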
Example #4
 def stateSlice(self):
     """Return a moduleslice that wraps the state transfer part of the layer.
     """
     return ModuleSlice(self,
                        inSliceFrom=self.dim * (3 + self.dimensions),
                        outSliceFrom=self.dim)
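
Example #5 below is the complement: because meatSlice ends where stateSlice begins (inputs split at dim * (3 + dimensions), outputs at dim), the two slices tile the layer's buffers without overlap.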
Example #5
 def meatSlice(self):
     """Return a moduleslice that wraps the meat part of the layer."""
     return ModuleSlice(self,
                        inSliceTo=self.dim * (3 + self.dimensions),
                        outSliceTo=self.dim)
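
A minimal check of that complementarity, assuming the enclosing class is PyBrain's MDLSTMLayer (which defines both methods) and that ModuleSlice exposes indim/outdim, as PyBrain's connections require; the sizes are illustrative:

    from pybrain.structure.modules import MDLSTMLayer

    layer = MDLSTMLayer(2, dimensions=1)  # dim=2 cells, one recurrent dimension
    meat, state = layer.meatSlice(), layer.stateSlice()
    # holds by construction: each slice's missing bounds default to the
    # layer's full extent, so the pair partitions both buffers
    assert meat.indim + state.indim == layer.indim
    assert meat.outdim + state.outdim == layer.outdim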
Example #6
    def __init__(self, R, P, T):
        """
        FeedForwardNetworks are networks that do not work on sequential data:
        every input is treated as independent of any previous or following input.
        """

        self._ffn = FeedForwardNetwork()
        """
			Input layer:
		    R_iP_j (region i, product j) at times (t-T, .., t-1)

		    T - time interval
		    R - number of regions
		    P - number of products

		    RPT - dimensionality of input layer

		    ***
		    input SORTED in ORDER RPT
		    ***
		"""

        dim = T * R * P

        inputL = LinearLayer(dim, name="input layer")  # one unit per (R_i, P_j, t)
        """
			Layer 1:
		    groups of neurons for R_iP_j
		    k_1*R*P
		"""

        k1 = T / 25
        h1 = k1 * R * P
        # weighted average
        hiddenL_1 = LinearLayer(h1, name="hidden layer 1 - R_iP_j")

        k2 = k1 // 2
        h2 = k2 * (R + P)
        hiddenL_2 = TanhLayer(h2, name="hidden layer 2 - R_i, P_j")

        h3 = 2 * h2
        hiddenL_3 = TanhLayer(h3, name="hidden layer 3 - random nodes")

        outputL = LinearLayer(R * P, name="output layer")
        """
        add layers to the network
        """
        self._ffn.addInputModule(inputL)
        self._ffn.addOutputModule(outputL)

        self._ffn.addModule(hiddenL_1)
        self._ffn.addModule(hiddenL_2)
        self._ffn.addModule(hiddenL_3)
        """
			create connections between layers
		"""

        # INPUT => 1ST HIDDEN LAYER

        # T*k1 shared weights per (input slice -> h1 slice) connection
        # mother connection to hold the shared weights
        mc1 = MotherConnection(T * k1, name="sharedConnection")

        # keep slice indices to check
        inSlices = dict()
        outSlices = dict()

        # keep slices to check
        inputSlices = dict()
        h1Slices = dict()

        # keep connections to check
        sharedConn = dict()

        for i in range(R * P):
            # slice bounds are exclusive (like Python slicing), so no "-1":
            # with it, every slice would lose one unit and the shared
            # connection would hold too few parameters
            outSlices[i] = (i * T, (i + 1) * T)
            inSlices[i] = (i * k1, (i + 1) * k1)

            print(outSlices[i], inSlices[i])

            inputSlices[i] = ModuleSlice(inputL,
                                         inSliceFrom=outSlices[i][0],
                                         inSliceTo=outSlices[i][1],
                                         outSliceFrom=outSlices[i][0],
                                         outSliceTo=outSlices[i][1])
            h1Slices[i] = ModuleSlice(hiddenL_1,
                                      inSliceFrom=inSlices[i][0],
                                      inSliceTo=inSlices[i][1],
                                      outSliceFrom=inSlices[i][0],
                                      outSliceTo=inSlices[i][1])

            sharedConn[i] = SharedFullConnection(mc1, inputSlices[i],
                                                 h1Slices[i])

        for conn in sharedConn.values():
            self._ffn.addConnection(conn)

        # 1ST HIDDEN LAYER => 2ND HIDDEN LAYER
        h2_inIndices = dict()
        h2_inSlices = dict()
        for i in range(R + P):
            h2_inIndices[i] = (k2 * i, k2 * (i + 1))
            print(h2_inIndices[i])
            # no out-slices for h2, since it will be fully connected to h3
            h2_inSlices[i] = ModuleSlice(hiddenL_2,
                                         inSliceFrom=h2_inIndices[i][0],
                                         inSliceTo=h2_inIndices[i][1])

        # link each R_iP_j h1 slice to its region slice R_i and its product
        # slice P_j in h2: the first R h2 slices are regions, the next P products
        h1h2Conn = dict()
        for i in range(R * P):
            rj, pj = i // P, R + i % P

            h1h2Conn[i] = FullConnection(h1Slices[i],
                                         h2_inSlices[rj],
                                         name="h1_h2_" + str(i))
            h1h2Conn[R * P + i] = FullConnection(h1Slices[i],
                                                 h2_inSlices[pj],
                                                 name="h1_h2_" + str(R * P + i))

        for conn in h1h2Conn.values():
            print(conn)
            print(conn.params)
            self._ffn.addConnection(conn)
        """
			CAREFUL: for test numbers only 3 params for each pair of connected slices although it should be 4*2=8??
			
		# full connection between Region and State layer and random hidden layer
		self._ffn.addConnection(FullConnection(hiddenL_2, hiddenL_3))

		# full connection from random to output layer
		self._ffn.addConnection(FullConnection(hiddenL_3, outputL))

		"""
        self._ffn.sortModules()
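
A minimal smoke test of the constructor above (sketch only: the snippet never shows the class statement, so DemandNet is a hypothetical name, and T must be a positive multiple of 50 so that k1 = T // 25 and k2 = k1 // 2 stay positive):

    from numpy import ones

    net = DemandNet(R=2, P=2, T=50)            # hypothetical class name; k1=2, k2=1
    out = net._ffn.activate(ones(2 * 2 * 50))  # one input per (region, product, t)
    print(len(out))                            # 4 = R*P forecasts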