Example #1
# Imports assumed from PyBrain's standard layout (the original snippet omits them):
from pybrain import FeedForwardNetwork, LinearLayer, TanhLayer, FullConnection

def set_nn(inp, hid1, out):
    # Make a new FFN object:
    n = FeedForwardNetwork()

    # Constructing the input, output and hidden layers:
    inLayer = LinearLayer(inp)
    hiddenLayer1 = TanhLayer(hid1)
    #    hiddenLayer2 = TanhLayer(hid2)
    outLayer = LinearLayer(out)

    # Adding layers to the network:
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    #    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)

    # determining how neurons should be connected:
    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    #    hid_to_hid = FullConnection(hiddenLayer1,hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    # Adding connections to the network
    n.addConnection(in_to_hidden)
    #    n.addConnection(hid_to_hid)
    n.addConnection(hidden_to_out)

    # Final step that makes our MLP usable
    n.sortModules()
    return n
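A quick usage sketch (not part of the original example; the sizes are illustrative):

# Hedged example: a 2-3-1 network and a single forward pass.
net = set_nn(2, 3, 1)            # 2 inputs, 3 tanh hidden units, 1 linear output
print net.activate([0.5, -0.5])  # prints the 1-dimensional output vector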
Example #2

def buildSharedCrossedNetwork():
    """ build a network with shared connections. Two hidden modules are symmetrically linked, but to a different
    input neuron than the output neuron. The two shared weights are set explicitly (1 and 2). """
    N = FeedForwardNetwork('shared-crossed')
    h = 1
    a = LinearLayer(2, name='a')
    b = LinearLayer(h, name='b')
    c = LinearLayer(h, name='c')
    d = LinearLayer(2, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(c)
    N.addOutputModule(d)

    m1 = MotherConnection(h)
    m1.params[:] = scipy.array((1, ))

    m2 = MotherConnection(h)
    m2.params[:] = scipy.array((2, ))

    N.addConnection(SharedFullConnection(m1, a, b, inSliceTo=1))
    N.addConnection(SharedFullConnection(m1, a, c, inSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, b, d, outSliceFrom=1))
    N.addConnection(SharedFullConnection(m2, c, d, outSliceTo=1))
    N.sortModules()
    return N
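The point of the mother connections is that both a->b and a->c reuse the single weight in m1 (and b->d, c->d the one in m2), so the wiring is exactly crossed. A hedged check, assuming the builder above and its imports (scipy, MotherConnection, SharedFullConnection) are in scope:

N = buildSharedCrossedNetwork()
print N.activate([1., 0.])  # -> [0., 2.]: a[0] reaches d[1] via b (weights 1 and 2)
print N.activate([0., 1.])  # -> [2., 0.]: a[1] reaches d[0] via c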
Example #3
def constructPerceptron(name, numNeurons):
    """Возвращает необученную сеть
    Аргументы:
    name -- имя сети, строка
    numNeurons -- число нейронов в каждом слое, список из целых чисел
    """
    # Create the network
    net = FeedForwardNetwork(name)
    # Create the layers and add them to the network
    prevLayer = None
    newLayer = None
    for i, val in enumerate(numNeurons):
        # The input layer is linear
        if (i == 0):
            newLayer = LinearLayer(val, 'input')
            net.addInputModule(newLayer)
            prevLayer = newLayer
        # The output layer is also linear
        elif (i == len(numNeurons) - 1):
            newLayer = LinearLayer(val, 'output')
            net.addOutputModule(newLayer)
        # Otherwise the layer is sigmoid
        else:
            newLayer = SigmoidLayer(val, 'hidden_' + str(i))
            net.addModule(newLayer)
        # For every layer after the first, connect it to the previous layer
        if (i > 0):
            conn = FullConnection(prevLayer, newLayer, 'conn_' + str(i))
            net.addConnection(conn)
            prevLayer = newLayer
    # Prepare the network for activation by sorting its internal structure
    net.sortModules()
    # Done
    return net
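A hedged usage sketch (the layer sizes are illustrative; imports follow PyBrain's standard layout):

from pybrain import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection

net = constructPerceptron('demo', [2, 3, 1])  # 2 inputs, 3 hidden sigmoids, 1 output
print net['input'], net['hidden_1'], net['output']  # modules are retrievable by name
print net.activate([0., 1.])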
Example #4

    def __init__(self, dims, **args):
        """ The one required argument specifies the sizes of each dimension (minimum 2) """
        assert len(dims) == 2
        SwipingNetwork.__init__(self, dims=dims, **args)
        if self.mariopos is None:
            self.mariopos = (dims[0] / 2, dims[1] / 2)

        pdims = product(dims)
        # the input is a 2D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.insize * pdims, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')

        # the output is a 2D-mesh (as a view on a flat sigmoid output layer)
        outmod = self.outcomponentclass(self.outputs * pdims, name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')

        if self.componentclass is MDLSTMLayer:
            c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
            hiddenmesh = ModuleMesh(c, (self.size, self.size, 4),
                                    'hidden',
                                    baserename=True)
        else:
            hiddenmesh = ModuleMesh.constructWithLayers(
                self.componentclass, self.hsize,
                tuple(list(dims) + [self.swipes]), 'hidden')

        self._buildSwipingStructure(inmesh, hiddenmesh, outmesh)

        o = LinearLayer(self.outputs)
        self.addConnection(IdentityConnection(outmesh[self.mariopos], o))
        self.outmodules = []
        self.addOutputModule(o)

        # add the identity connections for the states
        for m in self.modules:
            if isinstance(m, MDLSTMLayer):
                tmp = m.stateSlice()
                index = 0
                for c in list(self.connections[m]):
                    if isinstance(c.outmod, MDLSTMLayer):
                        self.addConnection(
                            IdentityConnection(
                                tmp,
                                c.outmod.stateSlice(),
                                outSliceFrom=self.hsize * (index),
                                outSliceTo=self.hsize * (index + 1)))
                        index += 1

        # special inputs
        self.addInputModule(LinearLayer(2, name='specialin'))
        self.addConnection(FullConnection(self['specialin'], o))

        self.sortModules()
Example #5

def buildSimpleBorderSwipingNet(size = 3, dim = 3, hsize = 1, predefined = {}):
    """ build a simple swiping network of given size and dimension, using linear inputs and outputs """
    # assuming identical size in all dimensions
    dims = tuple([size]*dim)
    # also includes one dimension for the swipes
    hdims = tuple(list(dims)+[2**dim])
    inmod = LinearLayer(size**dim, name = 'input')
    inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')
    outmod = LinearLayer(size**dim, name = 'output')
    outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')
    hiddenmesh = ModuleMesh.constructWithLayers(TanhLayer, hsize, hdims, 'hidden')
    return BorderSwipingNetwork(inmesh, hiddenmesh, outmesh, predefined = predefined)
Example #6
def buildSlicedNetwork():
    """ build a network with shared connections. Two hiddne modules are symetrically linked, but to a different 
    input neuron than the output neuron. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name='a')
    b = LinearLayer(2, name='b')
    N.addInputModule(a)
    N.addOutputModule(b)

    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
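Because the two sliced connections swap halves, each input component drives only the opposite output component (the actual values depend on the random weights). A hedged check:

N = buildSlicedNetwork()
print N.activate([1., 0.])  # only the second output is nonzero (a[0] -> b[1])
print N.activate([0., 1.])  # only the first output is nonzero (a[1] -> b[0])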
Example #7
    def _buildCaptureNetwork(self):
        # the input is a 2D-mesh (as a view on a flat input layer)
        inmod = LinearLayer(self.insize * self.size * self.size, name='input')
        inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.size, self.size),
                                            'inmesh')

        # the output is a 2D-mesh (as a view on a flat sigmoid output layer)
        outmod = self.outcomponentclass(self.outputs * self.size * self.size,
                                        name='output')
        outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.size, self.size),
                                             'outmesh')

        if self.componentclass is MDLSTMLayer:
            c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
            hiddenmesh = ModuleMesh(c, (self.size, self.size, 4),
                                    'hidden',
                                    baserename=True)
        else:
            hiddenmesh = ModuleMesh.constructWithLayers(
                self.componentclass, self.hsize, (self.size, self.size, 4),
                'hidden')

        self._buildBorderStructure(inmesh, hiddenmesh, outmesh)

        # add the identity connections for the states
        for m in self.modules:
            if isinstance(m, MDLSTMLayer):
                tmp = m.stateSlice()
                index = 0
                for c in list(self.connections[m]):
                    if isinstance(c.outmod, MDLSTMLayer):
                        self.addConnection(
                            IdentityConnection(
                                tmp,
                                c.outmod.stateSlice(),
                                outSliceFrom=self.hsize * (index),
                                outSliceTo=self.hsize * (index + 1)))
                        index += 1
        # direct connections between input and output
        if self.directlink:
            self._buildDirectLink(inmesh, outmesh)

        # combined inputs
        if self.combinputs > 0:
            cin = LinearLayer(self.combinputs, name='globalin')
            self.addInputModule(cin)
            if 'globalinconn' not in self.predefined:
                self.predefined['globalinconn'] = MotherConnection(
                    cin.componentOutdim * hiddenmesh.componentIndim,
                    'globalinconn')
            self._linkToAll(cin, hiddenmesh, self.predefined['globalinconn'])
Example #8
def prepare():
    """ Shape the dataset, and build the linear classifier """
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork
    from pybrain.datasets import SupervisedDataSet
    D = SupervisedDataSet(3, 1)
    for c, f, i in data:
        D.addSample([1, f, i], [c])

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.sortModules()
    return D, net
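A hedged continuation (it assumes the module-level `data` sequence that prepare() reads is defined): fit the linear model with a backprop trainer.

from pybrain.supervised.trainers import BackpropTrainer

D, net = prepare()
trainer = BackpropTrainer(net, dataset=D, learningrate=0.01)
for _ in range(20):
    print trainer.train()  # mean squared error, once per epoch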
Example #9
def buildNestedNetwork():
    """ build a nested network. """
    N = FeedForwardNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.sortModules()
    return N
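The inner network is treated as a single module by the outer one, so activation is transparent. A hedged check (buildNetwork comes from pybrain.tools.shortcuts):

N = buildNestedNetwork()
print N.activate([0.3])  # one outer input, two outer outputs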
Example #10
def buildMixedNestedNetwork():
    """ build a nested network with the inner one being a ffn and the outer one being recurrent. """
    N = RecurrentNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addRecurrentConnection(FullConnection(c, c))
    N.sortModules()
    return N
Example #11
def buildSimpleLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(1, name='i')
    h = LSTMLayer(1, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))
    N.sortModules()
    return N
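Recurrent networks are stateful: reset between sequences and activate once per timestep. A hedged usage sketch:

N = buildSimpleLSTMNetwork()
N.reset()                  # clear the LSTM state before a new sequence
for x in [0.1, 0.5, -0.2]:
    print N.activate([x])  # each output depends on the whole history so far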
Example #12
def testBank():
    D = readData()
    print len(D), 'samples', D.indim, 'features'
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork, BiasUnit, SigmoidLayer
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addModule(BiasUnit(name='bias'))
    net.addOutputModule(SigmoidLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.addConnection(FullConnection(net['bias'], net['out']))
    net.sortModules()
    p = net.params
    p *= 0.01
    provider = ModuleWrapper(D, net, shuffling=False)
    algo = SGD(
        provider,
        net.params.copy(),  #callback=printy, 
        learning_rate=5.5e-5)
    #algo = vSGDfd(provider, net.params.copy(), #callback=printy
    #              )
    printy(algo, force=True)
    for _ in range(5):
        algo.run(len(D))
        printy(algo, force=True)
Example #13
def buildNet(indim, hidden, outdim=2, temperature=1., recurrent=True):
    from pybrain import FullConnection, BiasUnit, TanhLayer, SoftmaxLayer, RecurrentNetwork, LinearLayer, LinearConnection, FeedForwardNetwork, SigmoidLayer
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(indim, name = 'i'))
    net.addModule(TanhLayer(hidden, name = 'h'))
    net.addModule(BiasUnit('bias'))
    net.addModule(SigmoidLayer(outdim, name = 'unscaled'))
    net.addOutputModule(SoftmaxLayer(outdim, name = 'o'))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['unscaled']))
    net.addConnection(FullConnection(net['h'], net['unscaled']))
    lconn = LinearConnection(net['unscaled'], net['o'])
    lconn._setParameters([1./temperature]*outdim)
    # these are fixed. 
    lconn.paramdim = 0
    net.addConnection(lconn)
    if recurrent:
        net.addRecurrentConnection(FullConnection(net['h'], net['h']))
    net.sortModules()
    print net
    print 'number of parameters', net.paramdim
    return net
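Since lconn.paramdim is zeroed before sortModules(), the 1/temperature scaling is excluded from net.params and stays fixed during training. A hedged check that the softmax output behaves as expected:

net = buildNet(3, 5, outdim=2, temperature=2., recurrent=False)
out = net.activate([0.1, 0.2, 0.3])
print out, out.sum()  # the two components sum to 1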
Example #14
def testPlot1():
    dim = 15
    from scipy import rand, dot
    from pybrain.datasets import SupervisedDataSet
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork
    from pybrain.utilities import dense_orth
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(dim, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.sortModules()

    ds = SupervisedDataSet(dim, 1)
    ds2 = SupervisedDataSet(dim, 1)
    R = dense_orth(dim)
    for _ in range(1000):
        tmp = rand(dim) > 0.5
        tmp2 = dot(tmp, R)
        ds.addSample(tmp, [tmp[-1]])
        ds2.addSample(tmp2, [tmp[-1]])

    f = ModuleWrapper(ds, net)
    f2 = ModuleWrapper(ds2, net)

    # tracking progress by callback
    ltrace = []

    def storer(a):
        ltrace.append(a.provider.currentLosses(a.bestParameters))

    x = net.params
    x *= 0.001

    algo = SGD(f, net.params.copy(), callback=storer, learning_rate=0.2)
    algo.run(1000)
    pylab.plot(ltrace, 'r-')

    del ltrace[:]

    algo = SGD(f2, net.params.copy(), callback=storer, learning_rate=0.2)
    algo.run(1000)
    pylab.plot(ltrace, 'g-')

    pylab.semilogy()
    pylab.show()
Example #15
    def train(self):
        """Train neuron grid by training sample"""

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(self.input_neurons)
        hiddenLayer = SigmoidLayer(self.hiden_neurons)
        outLayer = LinearLayer(self.OUTPUT_NEURONS)

        self.net.addInputModule(inLayer)

        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)
        self.net.sortModules()

        ds = ClassificationDataSet(self.input_neurons,
                                   self.OUTPUT_NEURONS,
                                   nb_classes=3)
        for i, coord in enumerate(self.X):
            ds.addSample(coord, (self.y[i], ))

        trainer = BackpropTrainer(self.net,
                                  dataset=ds,
                                  momentum=0.1,
                                  verbose=True,
                                  weightdecay=0.01)

        if self.maxErr:
            for i in range(self.maxEpochs):
                if trainer.train() < self.maxErr:
                    print "Desired error reached"
                    break
        else:
            trainer.trainUntilConvergence(maxEpochs=self.maxEpochs)

        print "Successfully finished"
Example #16
def buildCyclicNetwork(recurrent):
    """ build a cyclic network with 4 modules
    @param recurrent: make one of the connections recurrent """
    Network = RecurrentNetwork if recurrent else FeedForwardNetwork
    N = Network('cyc')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = LinearLayer(3, name='c')
    d = LinearLayer(4, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(d)
    N.addOutputModule(c)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addConnection(FullConnection(c, d))
    if recurrent:
        N.addRecurrentConnection(FullConnection(d, a))
    else:
        N.addConnection(FullConnection(d, a))
    N.sortModules()
    return N
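With recurrent=True the d->a link is resolved across timesteps; with recurrent=False the graph contains a genuine cycle that a feedforward net cannot topologically sort, so expect sortModules() to fail. A hedged two-step run:

N = buildCyclicNetwork(recurrent=True)
print N.activate([0.5])  # step 1: the recurrent d->a contribution is still zero
print N.activate([0.5])  # step 2: d's previous output now feeds back into a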
Example #17
def buildSimpleMDLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleMDLstmNet')
    i = LinearLayer(1, name='i')
    dim = 1
    h = MDLSTMLayer(dim, peepholes=peepholes, name='MDlstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, outSliceTo=4 * dim, name='f1'))
    N.addConnection(FullConnection(b, h, outSliceTo=4 * dim, name='f2'))
    N.addRecurrentConnection(
        FullConnection(h, h, inSliceTo=dim, outSliceTo=4 * dim, name='r1'))
    N.addRecurrentConnection(
        IdentityConnection(h,
                           h,
                           inSliceFrom=dim,
                           outSliceFrom=4 * dim,
                           name='rstate'))
    N.addConnection(FullConnection(h, o, inSliceTo=dim, name='f3'))
    N.sortModules()
    return N
Example #18
def create_network(*layers, **options):
    """Build arbitrarily deep networks.

    `layers` should be a list or tuple of integers, that indicate how many
    neurons the layers should have. `bias` and `outputbias` are flags to
    indicate whether the network should have the corresponding biases; both
    default to True.

    To adjust the classes for the layers use the `hiddenclass` and `outclass`
    parameters, which expect a subclass of :class:`NeuronLayer`.

    If the `recurrent` flag is set, a :class:`RecurrentNetwork` will be created,
    otherwise a :class:`FeedForwardNetwork`.

    If the `fast` flag is set, faster arac networks will be used instead of the
    pybrain implementations."""
    # options
    opt = {
        'bias': True,
        'hiddenclass': SigmoidLayer,
        'outclass': LinearLayer,
        'outputbias': True,
        'peepholes': False,
        'recurrent': False,
        'fast': False,
    }
    for key in options:
        if key not in opt.keys():
            raise NetworkError('buildNetwork unknown option: %s' % key)
        opt[key] = options[key]

    if len(layers) < 2:
        raise NetworkError(
            'buildNetwork needs 2 arguments for input and output layers at least.'
        )

    # Bind the right class to the Network name
    network_map = {
        (False, False): FeedForwardNetwork,
        (True, False): RecurrentNetwork,
    }

    try:
        # arac's fast implementations; if their import failed, these names are
        # undefined and the NameError branch below fires.
        network_map[(False, True)] = _FeedForwardNetwork
        network_map[(True, True)] = _RecurrentNetwork
    except NameError:
        if opt['fast']:
            raise NetworkError("No fast networks available.")
    if opt['hiddenclass'].sequential or opt['outclass'].sequential:
        if not opt['recurrent']:
            # CHECKME: a warning here?
            opt['recurrent'] = True

    Network = network_map[opt['recurrent'], opt['fast']]

    n = Network()
    # linear input layer
    n.addInputModule(LinearLayer(layers[0], name='in'))
    # output layer of type 'outclass'
    n.addOutputModule(opt['outclass'](layers[-1], name='out'))

    if opt['bias']:
        # add bias module and connection to the out module, if desired
        n.addModule(BiasUnit(name='bias'))
        if opt['outputbias']:
            n.addConnection(FullConnection(n['bias'], n['out']))

    # arbitrary number of hidden layers of type 'hiddenclass'
    for i, num in enumerate(layers[1:-1]):
        layername = 'hidden%i' % i
        n.addModule(opt['hiddenclass'](num, name=layername))
        if opt['bias']:
            # also connect every hidden layer with the bias
            n.addConnection(FullConnection(n['bias'], n[layername]))

    if len(layers) == 2:
        # no hidden layers: connect in directly to out
        n.addConnection(FullConnection(n['in'], n['out']))
    else:
        # connections from in to the first hidden layer, between consecutive
        # hidden layers, and from the last hidden layer to out
        n.addConnection(FullConnection(n['in'], n['hidden0']))
        for i in range(len(layers) - 3):
            n.addConnection(FullConnection(n['hidden%i' % i],
                                           n['hidden%i' % (i + 1)]))
        n.addConnection(FullConnection(n['hidden%i' % (len(layers) - 3)],
                                       n['out']))

    # recurrent connection on the first hidden layer (or on out, if there is none)
    if opt['recurrent']:
        print "Recurrent network"
        rec = n['hidden0'] if len(layers) > 2 else n['out']
        n.addRecurrentConnection(FullConnection(rec, rec))

    n.sortModules()
    return n
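A hedged usage sketch (this helper mirrors pybrain's buildNetwork shortcut, so the defaults give a sigmoid hidden layer and a linear output):

net = create_network(4, 3, 2)             # in(4) -> hidden0(3, sigmoid) -> out(2)
print net.activate([0.1, 0.2, 0.3, 0.4])  # plain forward pass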
Example #19
import pandas as pd
import matplotlib.pylab as plt
from pybrain import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

dateparse = lambda dates: pd.datetime.strptime(dates, '%Y-%m')
data = pd.read_csv('AirPassengers.csv',
                   parse_dates=['Month'],
                   index_col='Month',
                   date_parser=dateparse)
ts = data['#Passengers']

# Make a new FFN object:
n = FeedForwardNetwork()

# Constructing the input, output and hidden layers:
inLayer = LinearLayer(3)
hiddenLayer = SigmoidLayer(4)
outLayer = LinearLayer(1)

# Adding layers to the network:
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

# determining how neurons should be connected:
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

# Adding connections to the network
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)
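The excerpt ends before the network is finalized. A hedged continuation (the 3-month lag windowing is an assumption, not from the original source) would sort the modules and train on sliding windows of the series:

# Hedged continuation: finalize the net, then fit it on lag-3 windows.
n.sortModules()

ds = SupervisedDataSet(3, 1)
values = ts.values.astype('float64')
for t in range(3, len(values)):
    ds.addSample(values[t - 3:t], [values[t]])

trainer = BackpropTrainer(n, dataset=ds)
for _ in range(10):
    print trainer.train()  # error per epoch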
Example #20
def buildSomeModules(number=4):
    res = []
    for i in range(number):
        res.append(LinearLayer(1, 'l' + str(i)))
    return res
Example #21
import random

from pybrain import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection
from pybrain.structure.modules.neuronlayer import NeuronLayer
from pybrain.datasets.supervised import SupervisedDataSet
from irisdataset import IRIS_TEST_SET, IRIS_TRAIN_SET

# Because the version of pybrain from pip isn't up to date
class ReluLayer(NeuronLayer):
    """ Layer of rectified linear units (relu). """

    def _forwardImplementation(self, inbuf, outbuf):
        outbuf[:] = inbuf * (inbuf > 0)

    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        inerr[:] = outerr * (inbuf > 0)

net = FeedForwardNetwork()

inLayer = LinearLayer(4, name="in")
hidden0 = SigmoidLayer(5, name="hidden0")
#hidden1 = SigmoidLayer(5, name="hidden1")
outLayer = SigmoidLayer(3, name="out")

net.addInputModule(inLayer)
net.addModule(hidden0)
#net.addModule(hidden1)
net.addOutputModule(outLayer)

def init_params(conn):
    for i in range(len(conn.params)):
        conn.params[i] = random.uniform(-0.2, 0.2)

in2Hidden = FullConnection(inLayer, hidden0)
#hidden01 = FullConnection(hidden0, hidden1)