from pybrain.structure import (FeedForwardNetwork, FullConnection,
                               LinearLayer, TanhLayer, SoftmaxLayer)
from pybrain.structure.modules import BiasUnit


def add_modules(net):
    modules = {}

    # define modules
    modules['inp'] = LinearLayer(400)
    modules['input_bias'] = BiasUnit()
    modules['h1'] = TanhLayer(300)
    modules['h1_bias'] = BiasUnit()
    modules['h2'] = TanhLayer(200)
    modules['h2_bias'] = BiasUnit()
    #modules['h3'] = neurons.euclideanDistance(100)
    modules['outp'] = SoftmaxLayer(2)
    modules['output_bias'] = BiasUnit()

    # add modules
    net.addInputModule(modules['inp'])
    net.addOutputModule(modules['outp'])
    net.addModule(modules['h1'])
    net.addModule(modules['h2'])
    net.addModule(modules['input_bias'])
    net.addModule(modules['h1_bias'])
    net.addModule(modules['h2_bias'])
    net.addModule(modules['output_bias'])
    #net.addModule(modules['h3'])

    return modules
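
# A minimal usage sketch (an addition, not part of the original snippet):
# add_modules only registers the layers, so the caller still has to wire the
# connections. The bias wiring below is one plausible reading of the module
# names; 'input_bias' is left unconnected because its intended target is not
# shown in the original.
net = FeedForwardNetwork()
modules = add_modules(net)
net.addConnection(FullConnection(modules['inp'], modules['h1']))
net.addConnection(FullConnection(modules['h1'], modules['h2']))
net.addConnection(FullConnection(modules['h2'], modules['outp']))
net.addConnection(FullConnection(modules['h1_bias'], modules['h1']))
net.addConnection(FullConnection(modules['h2_bias'], modules['h2']))
net.addConnection(FullConnection(modules['output_bias'], modules['outp']))
net.sortModules()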
Example #2
from pybrain.structure import (FeedForwardNetwork, SigmoidLayer,
                               FullConnection)
from pybrain.structure.modules import BiasUnit


def custom_build_network(layer_sizes):
    net = FeedForwardNetwork()

    inp = SigmoidLayer(layer_sizes[0], name='visible')
    h1 = SigmoidLayer(layer_sizes[1], name='hidden1')
    h2 = SigmoidLayer(layer_sizes[2], name='hidden2')
    out = SigmoidLayer(layer_sizes[3], name='out')
    bias = BiasUnit(name='bias')

    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addOutputModule(out)
    net.addModule(bias)

    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, h2))
    net.addConnection(FullConnection(h2, out))

    net.addConnection(FullConnection(bias, h1))
    net.addConnection(FullConnection(bias, h2))
    net.addConnection(FullConnection(bias, out))

    net.sortModules()
    return net
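
# A minimal usage sketch (an addition, not in the original snippet):
# layer_sizes is assumed to be a 4-element sequence, one entry per layer.
net = custom_build_network([4, 3, 3, 1])
print(net.activate([0.5, 0.1, 0.9, 0.2]))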
Example #3
    def _buildBorderStructure(self, inmesh, hiddenmesh, outmesh):
        self._buildSwipingStructure(inmesh, hiddenmesh, outmesh)
        self.addModule(BiasUnit(name='bias'))

        # build the motherconnections for the borders
        if self.simpleborders:
            if 'borderconn' not in self.predefined:
                self.predefined['borderconn'] = MotherConnection(
                    hiddenmesh.componentIndim, name='bconn')
        else:
            if 'bordconns' not in self.predefined:
                self.predefined['bordconns'] = {}
            for dim, maxval in enumerate(self.dims):
                if dim > 0 and self.symmetricdimensions:
                    self.predefined['bordconns'][dim] = self.predefined[
                        'bordconns'][0]
                elif dim not in self.predefined['bordconns']:
                    self.predefined['bordconns'][dim] = {}
                tmp = self.predefined['bordconns'][dim].copy()
                if len(self.dims) == 1 and () not in tmp:
                    tmp[()] = MotherConnection(hiddenmesh.componentIndim,
                                               name='bconn')
                for t in iterCombinations(tupleRemoveItem(self.dims, dim)):
                    tc = self._canonicForm(t, dim)
                    if t == tc and t not in tmp:
                        # the connections from the borders are symmetrical,
                        # so we need separate ones only up to the middle
                        tmp[t] = MotherConnection(hiddenmesh.componentIndim,
                                                  name='bconn' + str(dim) +
                                                  str(t))
                        if self.extrapolateBorderValues:
                            p = self._extrapolateBorderAt(
                                t, self.predefined['bordconns'][dim])
                            if p is not None:
                                tmp[t].params[:] = p
                self.predefined['bordconns'][dim] = tmp

        # link the bordering units to the bias, using the correct connection
        for dim, maxval in enumerate(self.dims):
            for unit in self._iterateOverUnits():
                if self.simpleborders:
                    bconn = self.predefined['borderconn']
                else:
                    tc = self._canonicForm(tupleRemoveItem(unit, dim), dim)
                    bconn = self.predefined['bordconns'][dim][tc]
                hunits = []
                if unit[dim] == 0:
                    for swipe in range(self.swipes):
                        # bit `dim` of the swipe index gives the direction of
                        # travel along this dimension (// keeps this an
                        # integer test under Python 3 as well)
                        if (swipe // 2 ** dim) % 2 == 0:
                            hunits.append(tuple(list(unit) + [swipe]))
                if unit[dim] == maxval - 1:
                    for swipe in range(self.swipes):
                        if (swipe // 2 ** dim) % 2 == 1:
                            hunits.append(tuple(list(unit) + [swipe]))
                for hunit in hunits:
                    self.addConnection(
                        SharedFullConnection(bconn, self['bias'],
                                             hiddenmesh[hunit]))
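
        # An illustration (an addition, not in the original): for a 2D mesh
        # there are 2**2 = 4 swipes, and bit `dim` of the swipe index gives
        # the direction along that dimension, i.e.
        #   swipe 0 -> dim0 forward,  dim1 forward
        #   swipe 1 -> dim0 backward, dim1 forward
        #   swipe 2 -> dim0 forward,  dim1 backward
        #   swipe 3 -> dim0 backward, dim1 backward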
Example #4
from itertools import chain

from pybrain.structure import (FeedForwardNetwork, FullConnection,
                               IdentityConnection)
from pybrain.structure.modules import BiasUnit


def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due 
    to this, all layers have to have the same dimension. We call these tuples
    'parts.'
    
    Afterwards, the last layer of one tuple is connected to the first layer of 
    the following tuple by a FullConnection.
    
    If the keyword argument `bias` is given, a BiasUnit is added along with
    every FullConnection.

    Example:
    
        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options.get('bias', False)

    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(next(layerParts))
    firstLayer = next(firstPart)
    net.addInputModule(firstLayer)

    prevLayer = firstLayer

    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    # the loop variable still holds the last layer of the final part
    net.addOutputModule(layer)
    net.sortModules()
    return net
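
# A usage sketch (an addition, not in the original), mirroring the
# docstring's example; every layer within one part must share a dimension.
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.structure.modules import GaussianLayer

demo_net = _buildNetwork(
    (LinearLayer(3),),
    (SigmoidLayer(4), GaussianLayer(4)),
    (SigmoidLayer(3),),
    bias=True,
)
print(demo_net.activate([1.0, 2.0, 3.0]))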
Example #5
def buildNetwork(*layers, **options):
    """Build arbitrary deep networks.
    
    `layers` should be a list or tuple of integers, that indicate how many
    neurons the layers should have. `bias` and `outputbias` are flags to
    indicate whether the network should have the corresponding biases; both
    default to True.
        
    To adjust the classes for the layers use the `hiddenclass` and  `outclass`
    parameters, which expect a subclass of NeuronLayer.
    
    If the `recurrent` flag is set, a RecurrentNetwork will be created, 
    otherwise a FeedForwardNetwork.
    
    If the `fast` flag is set, faster arac networks will be used instead of the 
    pybrain implementations."""
    # options
    opt = {
        'bias': True,
        'hiddenclass': SigmoidLayer,
        'outclass': LinearLayer,
        'outputbias': True,
        'peepholes': False,
        'recurrent': False,
        'fast': False,
    }
    for key in options:
        if key not in opt:
            raise NetworkError('buildNetwork unknown option: %s' % key)
        opt[key] = options[key]

    if len(layers) < 2:
        raise NetworkError(
            'buildNetwork needs 2 arguments for input and output layers at least.'
        )

    # Bind the right class to the Network name
    network_map = {
        (False, False): FeedForwardNetwork,
        (True, False): RecurrentNetwork,
    }
    try:
        network_map[(False, True)] = _FeedForwardNetwork
        network_map[(True, True)] = _RecurrentNetwork
    except NameError:
        if opt['fast']:
            raise NetworkError("No fast networks available.")
    if opt['hiddenclass'].sequential or opt['outclass'].sequential:
        if not opt['recurrent']:
            # CHECKME: a warning here?
            opt['recurrent'] = True
    Network = network_map[opt['recurrent'], opt['fast']]
    n = Network()
    # linear input layer
    n.addInputModule(LinearLayer(layers[0], name='in'))
    # output layer of type 'outclass'
    n.addOutputModule(opt['outclass'](layers[-1], name='out'))
    if opt['bias']:
        # add bias module and connection to out module, if desired
        n.addModule(BiasUnit(name='bias'))
        if opt['outputbias']:
            n.addConnection(FullConnection(n['bias'], n['out']))
    # arbitrary number of hidden layers of type 'hiddenclass'
    for i, num in enumerate(layers[1:-1]):
        layername = 'hidden%i' % i
        n.addModule(opt['hiddenclass'](num, name=layername))
        if opt['bias']:
            # also connect all the layers with the bias
            n.addConnection(FullConnection(n['bias'], n[layername]))
    # connections between hidden layers
    for i in range(len(layers) - 3):
        n.addConnection(
            FullConnection(n['hidden%i' % i], n['hidden%i' % (i + 1)]))
    # other connections
    if len(layers) == 2:
        # flat network, connection from in to out
        n.addConnection(FullConnection(n['in'], n['out']))
    else:
        # network with hidden layer(s), connections from in to first hidden and last hidden to out
        n.addConnection(FullConnection(n['in'], n['hidden0']))
        n.addConnection(
            FullConnection(n['hidden%i' % (len(layers) - 3)], n['out']))

    # recurrent connections
    if issubclass(opt['hiddenclass'], LSTMLayer):
        if len(layers) > 3:
            errorexit(
                "LSTM networks with > 1 hidden layers are not supported!")
        n.addRecurrentConnection(FullConnection(n['hidden0'], n['hidden0']))

    n.sortModules()
    return n
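
# A usage sketch (an addition, not in the original): buildNetwork is
# PyBrain's public shortcut for exactly this wiring, here a 2-3-3-1 net.
net = buildNetwork(2, 3, 3, 1, bias=True)
print(net.activate([1.0, 0.0]))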
Example #6
trainingData = ClassificationDataSet(64 * 64 * 3, nb_classes=2)
for n in xrange(0, trainingDataTemp.getLength()):
    trainingData.addSample(
        trainingDataTemp.getSample(n)[0],
        trainingDataTemp.getSample(n)[1])

# re-encode targets as one-of-many (one-hot) vectors, which PyBrain's
# classification training expects
testingData._convertToOneOfMany()
trainingData._convertToOneOfMany()

##### BUILD ANN #####
# build feed-forward multi-layer perceptron ANN
fnn = FeedForwardNetwork()

# create layers: 64*64*3 input nodes (one per RGB pixel value), half as many hidden nodes, 2 output nodes
bias = BiasUnit(name='bias unit')
input_layer = LinearLayer(64 * 64 * 3, name='input layer')
hidden_layer = SigmoidLayer(64 * 64 * 3 // 2, name='hidden layer')
output_layer = SigmoidLayer(2, name='output layer')

# create connections with full connectivity between layers
bias_to_hidden = FullConnection(bias, hidden_layer, name='bias-hid')
bias_to_output = FullConnection(bias, output_layer, name='bias-out')
input_to_hidden = FullConnection(input_layer, hidden_layer, name='in-hid')
hidden_to_output = FullConnection(hidden_layer, output_layer, name='hid-out')

# add layers & connections to network
fnn.addModule(bias)
fnn.addInputModule(input_layer)
fnn.addModule(hidden_layer)
fnn.addOutputModule(output_layer)
fnn.addConnection(bias_to_hidden)
fnn.addConnection(bias_to_output)
fnn.addConnection(input_to_hidden)
fnn.addConnection(hidden_to_output)
fnn.sortModules()
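
# A training sketch (an addition, not part of the original snippet), using
# PyBrain's standard backprop trainer on the dataset built above.
from pybrain.supervised.trainers import BackpropTrainer

trainer = BackpropTrainer(fnn, dataset=trainingData, momentum=0.1,
                          weightdecay=0.01, verbose=True)
trainer.trainEpochs(5)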
Example #7
    def __init__(self,
                 timedim,
                 shape,
                 hiddendim,
                 outsize,
                 blockshape=None,
                 name=None):
        """Initialize an MdrnnLayer.

        The dimensionality of the sequence - for example 2 for a
        picture or 3 for a video - is given by `timedim`, while the sidelengths
        along each dimension are given by the tuple `shape`.

        The layer will have `hiddendim` hidden units per swiping direction. The
        number of swiping directions is given by 2**timedim, which corresponds
        to one swipe from each corner to its opposing corner and back.

        To indicate how many outputs per timesteps are used, you have to specify
        `outsize`.

        In order to treat blocks of the input and not single voxels, you can
        also specify `blockshape`. For example the layer will then feed (2, 2)
        chunks into the network at each timestep which correspond to the (2, 2)
        rectangles that the input can be split into.
        """
        self.timedim = timedim
        self.shape = shape
        blockshape = tuple([1] * timedim) if blockshape is None else blockshape
        self.blockshape = blockshape
        self.hiddendim = hiddendim
        self.outsize = outsize
        self.indim = reduce(operator.mul, shape, 1)
        self.blocksize = reduce(operator.mul, blockshape, 1)
        self.sequenceLength = self.indim // self.blocksize
        self.outdim = self.sequenceLength * self.outsize

        self.bufferlist = [('cellStates', self.sequenceLength * self.hiddendim)]

        Module.__init__(self, self.indim, self.outdim, name=name)

        # Amount of parameters that are required for the input to the hidden
        # layer.
        self.num_in_params = self.blocksize * self.hiddendim * (3 + self.timedim)

        # Amount of parameters that are needed for the recurrent connections.
        # There is one set of these parameters per time dimension.
        self.num_rec_params = outsize * hiddendim * (3 + self.timedim)

        # Amount of parameters that are needed for the output.
        self.num_out_params = outsize * hiddendim

        # Amount of parameters that are needed from the bias to the hidden and
        # the output
        self.num_bias_params = (3 + self.timedim) * self.hiddendim + self.outsize

        # Total number of parameters.
        self.num_params = sum(
            (self.num_in_params, self.timedim * self.num_rec_params,
             self.num_out_params, self.num_bias_params))

        ParameterContainer.__init__(self, self.num_params)

        # Some layers for internal use.
        self.hiddenlayer = MDLSTMLayer(self.hiddendim, self.timedim)

        # Every point in the sequence has timedim predecessors.
        self.predlayers = [LinearLayer(self.outsize) for _ in range(timedim)]

        # We need a single layer to hold the input. We will swipe a connection
        # over the correct part of it, in order to feed the correct input in.
        self.inlayer = LinearLayer(self.indim)
        # Make some layers the same to save memory.
        self.inlayer.inputbuffer = self.inlayer.outputbuffer = self.inputbuffer

        # In order to allocate not too much memory, we just set the size of the
        # layer to 1 and correct it afterwards.
        self.outlayer = LinearLayer(self.outdim)
        self.outlayer.inputbuffer = self.outlayer.outputbuffer = self.outputbuffer

        self.bias = BiasUnit()
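
        # A worked example (an addition, not in the original): for timedim=2,
        # hiddendim=5, outsize=2, blockshape=(1, 1) the counts above give
        #   num_in_params   = 1 * 5 * (3 + 2)       = 25
        #   num_rec_params  = 2 * 5 * (3 + 2)       = 50  (per time dimension)
        #   num_out_params  = 2 * 5                 = 10
        #   num_bias_params = (3 + 2) * 5 + 2       = 27
        #   num_params      = 25 + 2 * 50 + 10 + 27 = 162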
Example #8
#6.6
from pybrain.structure import LinearLayer, SigmoidLayer
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import FullConnection
from pybrain.structure.modules import BiasUnit

import random

# Create network modules
net = FeedForwardNetwork()
inl = LinearLayer(2)
hidl = SigmoidLayer(2)
outl = LinearLayer(1)
b = BiasUnit()

#6.7
# Create connections
in_to_h = FullConnection(inl, hidl)
h_to_out = FullConnection(hidl, outl)
bias_to_h = FullConnection(b, hidl)
bias_to_out = FullConnection(b, outl)

# Add modules to net
net.addInputModule(inl)
net.addModule(hidl)
net.addModule(b)
net.addOutputModule(outl)

# Add connections to net and sort
net.addConnection(in_to_h)
net.addConnection(h_to_out)
net.addConnection(bias_to_h)
net.addConnection(bias_to_out)
net.sortModules()
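
# A continuation sketch (an addition, not part of the original snippet): the
# unused SupervisedDataSet/BackpropTrainer imports above suggest XOR-style
# training as the next step.
ds = SupervisedDataSet(2, 1)
for xor_in, xor_out in [((0, 0), (0,)), ((0, 1), (1,)),
                        ((1, 0), (1,)), ((1, 1), (0,))]:
    ds.addSample(xor_in, xor_out)

trainer = BackpropTrainer(net, ds)
for _ in range(100):
    trainer.train()
print(net.activate((1, 0)))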
Example #9
    def __init__(self, rs):
        regression.__init__(self, rs)
        self.learningRate = rs.learningRate
        self.momentum = rs.momentum
        
        self.net = FeedForwardNetwork()
        
        # input layer
        inLayer = layersDict[rs.inputLayer](rs.inputDim)
        self.net.addInputModule(inLayer)

        # output layer
        outLayer = layersDict[rs.outputLayer](rs.outputDim)
        self.net.addOutputModule(outLayer)
        
        # no hidden layer
        if len(rs.hiddenLayers) == 0:
            # connection between input and output layer
            in_to_out = FullConnection(inLayer, outLayer)
            self.net.addConnection(in_to_out)
            if rs.bias:
                bias = BiasUnit('bias')
                self.net.addModule(bias)
                bias_to_out = FullConnection(bias, outLayer)
                self.net.addConnection(bias_to_out)
        else:
            # hidden layers
            hiddenLayers = []
            for layer in rs.hiddenLayers:
                tmp = layersDict[layer[0]](layer[1])
                self.net.addModule(tmp)
                hiddenLayers.append(tmp)

            # connection between input and first hidden layer
            in_to_hidden = FullConnection(inLayer, hiddenLayers[0])
            self.net.addConnection(in_to_hidden)

            # connections between consecutive hidden layers
            for i in range(1, len(hiddenLayers)):
                hidden_to_hidden = FullConnection(hiddenLayers[i - 1], hiddenLayers[i])
                self.net.addConnection(hidden_to_hidden)

            # connection between last hidden layer and output layer
            hidden_to_out = FullConnection(hiddenLayers[-1], outLayer)
            self.net.addConnection(hidden_to_out)
            
            if rs.bias:
                bias = BiasUnit('bias')
                self.net.addModule(bias)
                for layer in hiddenLayers:
                    bias_to_hidden = FullConnection(bias, layer)
                    self.net.addConnection(bias_to_hidden)

                bias_to_out = FullConnection(bias, outLayer)
                self.net.addConnection(bias_to_out)

        # initialisation of the weights
        self.net.sortModules()
        self.shape = self.net.params.shape
        self.net._setParameters(np.random.normal(0.0, 0.1, self.shape))

        self.ds = SupervisedDataSet(self.inputDimension, self.outputDimension)
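
# A hypothetical configuration sketch (an addition; neither layersDict nor
# the rs settings class appear in the original snippet). The names below only
# illustrate the attributes this constructor reads, not the project's API.
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer

layersDict = {'linear': LinearLayer, 'sigmoid': SigmoidLayer, 'tanh': TanhLayer}

class Settings(object):
    learningRate = 0.01
    momentum = 0.9
    inputLayer = 'linear'
    inputDim = 4
    outputLayer = 'linear'
    outputDim = 2
    hiddenLayers = [('tanh', 10), ('tanh', 10)]  # (layer type, size) pairs
    bias = True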
Example #10
import numpy as np

from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection
from pybrain.structure.modules import BiasUnit
# MixtureDensityLayer lives in pybrain.structure.modules.mixturedensity
# (import path assumed; the snippet's own imports are truncated).
from pybrain.structure.modules.mixturedensity import MixtureDensityLayer


# The function head below is reconstructed: the snippet is truncated and only
# the docstring tail and body of this normal-pdf helper survive.
def gaussian(x, mean, stddev):
    """Return the normal pdf evaluated at x.

    mean: vector
    stddev: vector
    """
    tmp = -0.5 * ((x - mean) / stddev)**2
    return np.exp(tmp) / (np.sqrt(2. * np.pi) * stddev)


if __name__ == '__main__':
    # build a network
    n = FeedForwardNetwork()
    # linear input layer
    n.addInputModule(LinearLayer(1, name='in'))
    # output layer of type 'outclass'
    N_GAUSSIANS = 3
    n.addOutputModule(MixtureDensityLayer(dim=1, name='out', mix=N_GAUSSIANS))
    # add bias module and connection to out module
    n.addModule(BiasUnit(name='bias'))
    n.addConnection(FullConnection(n['bias'], n['out']))

    # arbitrary number of hidden layers of type 'hiddenclass'
    n.addModule(SigmoidLayer(5, name='hidden'))
    n.addConnection(FullConnection(n['bias'], n['hidden']))

    # network with hidden layer(s), connections
    # from in to first hidden and last hidden to out
    n.addConnection(FullConnection(n['in'], n['hidden']))
    n.addConnection(FullConnection(n['hidden'], n['out']))
    n.sortModules()
    n._setParameters(np.random.uniform(-0.1, 0.1, size=n.paramdim))

    # build some data
    y = np.arange(0.0, 1.0, 0.005).reshape(200, 1)
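
    # A small demo (an addition, not in the original): evaluate the
    # reconstructed pdf on the grid above and run the untrained net once.
    p = gaussian(y, mean=0.5, stddev=0.1)
    print(p[:5])
    print(n.activate([0.5]))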
Example #11
def trainNetwork(train_ds, test_ds,
                 train_ds_labels, test_ds_labels,
                 features,
                 learningrate, lrdecay,
                 momentum, weightdecay,
                 hidden_layers,
                 time_limit_seconds):
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(train_ds.indim)
    fnn.addInputModule(inLayer)
    lastLayer = inLayer
    connection_number = 0 # connection-0 is the connection from the input layer.
    for hidden_layer_size in hidden_layers:
#        hiddenLayer = SigmoidLayer(hidden_layer_size)
        hiddenLayer = TanhLayer(hidden_layer_size)
        fnn.addModule(hiddenLayer)
        fnn.addConnection(
            FullConnection(lastLayer, hiddenLayer,
                           name="connection-%d" % connection_number))
        connection_number += 1
        bias = BiasUnit()
        fnn.addModule(bias)
        fnn.addConnection(FullConnection(bias, hiddenLayer))
        lastLayer = hiddenLayer
    outLayer = SigmoidLayer(train_ds.outdim)
    fnn.addOutputModule(outLayer)
    fnn.addConnection(
        FullConnection(lastLayer, outLayer,
                       name="connection-%d" % connection_number))
    bias = BiasUnit()
    fnn.addModule(bias)
    fnn.addConnection(FullConnection(bias, outLayer))
    fnn.sortModules()

    trainer = BackpropTrainer(fnn, dataset=train_ds,
                              learningrate=learningrate,
                              lrdecay=lrdecay,
                              momentum=momentum,
                              verbose=False,
                              weightdecay=weightdecay)

    # Train
    (initial_train_error, initial_train_F1) = percentClassErrorAndF1(fnn, train_ds, train_ds_labels, features)
    train_errors = [initial_train_error]
    train_F1s = [initial_train_F1]
    (initial_test_error, initial_test_F1) = percentClassErrorAndF1(fnn, test_ds, test_ds_labels, features)
    test_errors = [initial_test_error]
    test_F1s = [initial_test_F1]
    train_algo_errors = [trainer.testOnData(train_ds) * 100]
    test_algo_errors = [trainer.testOnData(test_ds) * 100]
    epochs = [0]
    try:
        start_time = time.time()
        for i in range(200):
            for _ in xrange(50):
                train_algo_error = trainer.train() * 100.0
                if math.isnan(train_algo_error):
                    break
            if math.isnan(train_algo_error):
                break
            (trnresult, trnF1) = percentClassErrorAndF1(fnn, train_ds, train_ds_labels, features)
            (tstresult, tstF1) = percentClassErrorAndF1(fnn, test_ds, test_ds_labels, features)
            test_algo_error = trainer.testOnData(test_ds) * 100
            now_time = time.time()
            time_left = time_limit_seconds - (now_time - start_time)
            print("epoch %3d:" % trainer.totalepochs,
                  "  train error: %6.4f%%" % train_algo_error,
                  "  test error: %6.4f%%" % test_algo_error,
                  "  train F1: %s" % ", ".join([("%.2f" % x) for x in trnF1]),
                  "  test F1: %s" % ", ".join([("%.2f" % x) for x in tstF1]),
                  "  %ds left" % int(round(time_left)))

            epochs.append(trainer.totalepochs)
            train_errors.append(trnresult)
            train_F1s.append(trnF1)
            test_errors.append(tstresult)
            test_F1s.append(tstF1)
            train_algo_errors.append(train_algo_error)
            test_algo_errors.append(test_algo_error)
            if time_left <= 0:
                print("Timeout: Time to report the results.")
                break
            # if test_algo_errors[-1] < 4:
            #     print("Good enough? Don't want to overtrain")
            #     break

    except KeyboardInterrupt:
        # Someone pressed Ctrl-C, try to still plot the data.
        print("Aborted training...")
        pass

    return (fnn, epochs, train_algo_errors, test_algo_errors, train_F1s, test_F1s)
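
# A consumption sketch (an addition, not in the original): the datasets,
# label lists, and `features` argument are project-specific stand-ins here,
# so this call is illustrative rather than runnable as-is.
import matplotlib.pyplot as plt

fnn, epochs, train_err, test_err, train_F1s, test_F1s = trainNetwork(
    train_ds, test_ds, train_ds_labels, test_ds_labels, features,
    learningrate=0.01, lrdecay=1.0, momentum=0.9, weightdecay=0.0,
    hidden_layers=[64, 32], time_limit_seconds=600)

plt.plot(epochs, train_err, label='train error (%)')
plt.plot(epochs, test_err, label='test error (%)')
plt.xlabel('epoch')
plt.ylabel('error (%)')
plt.legend()
plt.show()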