Code example #1
from pybrain.structure import FeedForwardNetwork, LinearLayer, TanhLayer, FullConnection

def set_nn(inp, hid1, out):
    # Make a new FFN object:
    n = FeedForwardNetwork()

    # Constructing the input, output and hidden layers:
    inLayer = LinearLayer(inp)
    hiddenLayer1 = TanhLayer(hid1)
    #    hiddenLayer2 = TanhLayer(hid2)
    outLayer = LinearLayer(out)

    # Adding layers to the network:
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    #    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)

    # determining how neurons should be connected:
    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    #    hid_to_hid = FullConnection(hiddenLayer1,hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    # Adding connections to the network
    n.addConnection(in_to_hidden)
    #    n.addConnection(hid_to_hid)
    n.addConnection(hidden_to_out)

    # Final step that makes our MLP usable
    n.sortModules()
    return n
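A minimal usage sketch (the layer sizes are illustrative, not from the original project): build a 3-5-1 network and run one forward pass.

net = set_nn(3, 5, 1)
print(net.activate([0.2, -0.1, 0.4]))  # a single output value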
Code example #2
def testBank():
    # readData, printy, ModuleWrapper and SGD are helpers defined elsewhere
    # in the surrounding py-optim test file.
    D = readData()
    print(len(D), 'samples', D.indim, 'features')
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork, BiasUnit, SigmoidLayer
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addModule(BiasUnit(name='bias'))
    net.addOutputModule(SigmoidLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.addConnection(FullConnection(net['bias'], net['out']))
    net.sortModules()
    p = net.params
    p *= 0.01  # scale the initial weights down (in-place)
    provider = ModuleWrapper(D, net, shuffling=False)
    algo = SGD(provider, net.params.copy(),  # optionally: callback=printy
               learning_rate=5.5e-5)
    # alternative: algo = vSGDfd(provider, net.params.copy())
    printy(algo, force=True)
    for _ in range(5):
        algo.run(len(D))
        printy(algo, force=True)
Code example #3
def buildNet(indim, hidden, outdim=2, temperature=1., recurrent=True):
    from pybrain import FullConnection, BiasUnit, TanhLayer, SoftmaxLayer, RecurrentNetwork, LinearLayer, LinearConnection, FeedForwardNetwork, SigmoidLayer
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(indim, name='i'))
    net.addModule(TanhLayer(hidden, name='h'))
    net.addModule(BiasUnit('bias'))
    net.addModule(SigmoidLayer(outdim, name='unscaled'))
    net.addOutputModule(SoftmaxLayer(outdim, name='o'))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['unscaled']))
    net.addConnection(FullConnection(net['h'], net['unscaled']))
    lconn = LinearConnection(net['unscaled'], net['o'])
    lconn._setParameters([1. / temperature] * outdim)
    # Fix these weights: they implement the 1/temperature scaling and are not trained.
    lconn.paramdim = 0
    net.addConnection(lconn)
    if recurrent:
        net.addRecurrentConnection(FullConnection(net['h'], net['h']))
    net.sortModules()
    print(net)
    print('number of parameters', net.paramdim)
    return net
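A hedged usage sketch (the sizes are illustrative): the fixed LinearConnection divides the pre-softmax activations by `temperature`, so larger temperatures flatten the output distribution.

net = buildNet(4, 8, outdim=2, temperature=2., recurrent=False)
probs = net.activate([0.1, 0.3, -0.2, 0.5])
print(probs, probs.sum())  # softmax outputs sum to 1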
Code example #4
from pybrain.structure import FeedForwardNetwork, LinearLayer, FullConnection

def buildSlicedNetwork():
    """ Build a network with sliced connections: the two two-neuron layers are
    cross-linked, so each input neuron feeds the output neuron with the other
    index. The weights are random. """
    N = FeedForwardNetwork('sliced')
    a = LinearLayer(2, name='a')
    b = LinearLayer(2, name='b')
    N.addInputModule(a)
    N.addOutputModule(b)

    N.addConnection(FullConnection(a, b, inSliceTo=1, outSliceFrom=1))
    N.addConnection(FullConnection(a, b, inSliceFrom=1, outSliceTo=1))
    N.sortModules()
    return N
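A quick sanity check of the cross-wiring (a sketch, not from the original file): with only the two sliced connections, input 0 influences only output 1, and input 1 only output 0.

net = buildSlicedNetwork()
print(net.activate([1., 0.]))  # only the second output is nonzero
print(net.activate([0., 1.]))  # only the first output is nonzero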
Code example #5
from pybrain.structure import FeedForwardNetwork, LinearLayer, FullConnection
from pybrain.tools.shortcuts import buildNetwork

def buildNestedNetwork():
    """ Build a nested network: a complete network is added as a module of another one. """
    N = FeedForwardNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.sortModules()
    return N
Code example #6
from pybrain.structure import RecurrentNetwork, LinearLayer, FullConnection
from pybrain.tools.shortcuts import buildNetwork

def buildMixedNestedNetwork():
    """ Build a nested network with the inner one being a ffn and the outer one being recurrent. """
    N = RecurrentNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addRecurrentConnection(FullConnection(c, c))
    N.sortModules()
    return N
Code example #7
from pybrain.structure import RecurrentNetwork, LinearLayer, LSTMLayer, BiasUnit, FullConnection

def buildSimpleLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(1, name='i')
    h = LSTMLayer(1, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))  # was 'r1', duplicating the recurrent connection's name
    N.sortModules()
    return N
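A usage sketch (inputs are assumed, not from the original file): PyBrain's recurrent networks keep internal state across activate() calls, and reset() clears it.

net = buildSimpleLSTMNetwork()
net.reset()                   # clear the recurrent state
for x in [0.1, 0.5, -0.3]:
    print(net.activate([x]))  # output depends on the sequence so far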
Code example #8
from pybrain.structure import LinearLayer, FullConnection
from pybrain.tools.shortcuts import buildNetwork

def buildRecurrentNetwork():
    N = buildNetwork(1, 1, 1, recurrent=True, bias=False, hiddenclass=LinearLayer, outputbias=False)
    h = N['hidden0']
    N.addRecurrentConnection(FullConnection(h, h))
    N.sortModules()
    N.name = 'RecurrentNetwork'
    return N
Code example #9
    def train(self):
        """Train the network on the training samples."""

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(self.input_neurons)
        hiddenLayer = SigmoidLayer(self.hiden_neurons)
        outLayer = LinearLayer(self.OUTPUT_NEURONS)

        self.net.addInputModule(inLayer)

        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)
        self.net.sortModules()

        ds = ClassificationDataSet(self.input_neurons,
                                   self.OUTPUT_NEURONS,
                                   nb_classes=3)
        for i, coord in enumerate(self.X):
            ds.addSample(coord, (self.y[i], ))

        trainer = BackpropTrainer(self.net,
                                  dataset=ds,
                                  momentum=0.1,
                                  verbose=True,
                                  weightdecay=0.01)

        if self.maxErr:
            for i in range(self.maxEpochs):
                if trainer.train() < self.maxErr:
                    print "Desired error reached"
                    break
        else:
            trainer.trainUntilConvergence(maxEpochs=self.maxEpochs)

        print "Successfully finished"
Code example #10
from pybrain.structure import FeedForwardNetwork, RecurrentNetwork, LinearLayer, FullConnection

def buildCyclicNetwork(recurrent):
    """ Build a cyclic network with 4 modules.
    @param recurrent: make one of the connections recurrent """
    Network = RecurrentNetwork if recurrent else FeedForwardNetwork
    N = Network('cyc')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = LinearLayer(3, name='c')
    d = LinearLayer(4, name='d')
    N.addInputModule(a)
    N.addModule(b)
    N.addModule(d)
    N.addOutputModule(c)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addConnection(FullConnection(c, d))
    if recurrent:
        N.addRecurrentConnection(FullConnection(d, a))
    else:
        N.addConnection(FullConnection(d, a))
    N.sortModules()
    return N
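A minimal sketch of the recurrent variant (the input is illustrative): the d-to-a connection is applied across time steps, so repeated activations propagate the signal around the cycle.

net = buildCyclicNetwork(recurrent=True)
net.reset()
for _ in range(3):
    print(net.activate([1.]))  # three outputs from layer 'c'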
Code example #11
File: test_corn.py Project: yindlib/py-optim
def prepare():
    """ Shape the dataset, and build the linear classifier """
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork
    from pybrain.datasets import SupervisedDataSet
    D = SupervisedDataSet(3, 1)
    for c, f, i in data:  # `data` is the module-level sample list in test_corn.py
        D.addSample([1, f, i], [c])  # the constant 1 acts as a bias feature

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.sortModules()
    return D, net
Code example #12
from pybrain.structure import RecurrentNetwork, LinearLayer, MDLSTMLayer, BiasUnit, FullConnection, IdentityConnection

def buildSimpleMDLSTMNetwork(peepholes=False):
    N = RecurrentNetwork('simpleMDLstmNet')
    i = LinearLayer(1, name='i')
    dim = 1
    h = MDLSTMLayer(dim, peepholes=peepholes, name='MDlstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    # The first 4*dim input slots of the MDLSTM layer feed the cell input and
    # the three gates; the slots after them carry the previous cell state.
    N.addConnection(FullConnection(i, h, outSliceTo=4 * dim, name='f1'))
    N.addConnection(FullConnection(b, h, outSliceTo=4 * dim, name='f2'))
    N.addRecurrentConnection(
        FullConnection(h, h, inSliceTo=dim, outSliceTo=4 * dim, name='r1'))
    # Carry the cell state over to the next time step unchanged:
    N.addRecurrentConnection(
        IdentityConnection(h, h, inSliceFrom=dim, outSliceFrom=4 * dim,
                           name='rstate'))
    N.addConnection(FullConnection(h, o, inSliceTo=dim, name='f3'))
    N.sortModules()
    return N
Code example #13
def testPlot1():
    # ModuleWrapper and SGD are provided by the surrounding py-optim code.
    import pylab
    from numpy import dot
    from numpy.random import rand  # scipy no longer re-exports rand/dot
    from pybrain.datasets import SupervisedDataSet
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork
    from pybrain.utilities import dense_orth
    dim = 15
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(dim, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.sortModules()

    ds = SupervisedDataSet(dim, 1)
    ds2 = SupervisedDataSet(dim, 1)
    R = dense_orth(dim)
    for _ in range(1000):
        tmp = rand(dim) > 0.5
        tmp2 = dot(tmp, R)
        ds.addSample(tmp, [tmp[-1]])
        ds2.addSample(tmp2, [tmp[-1]])

    f = ModuleWrapper(ds, net)
    f2 = ModuleWrapper(ds2, net)

    # tracking progress by callback
    ltrace = []

    def storer(a):
        ltrace.append(a.provider.currentLosses(a.bestParameters))

    x = net.params
    x *= 0.001  # scale the initial weights down (in-place)

    algo = SGD(f, net.params.copy(), callback=storer, learning_rate=0.2)
    algo.run(1000)
    pylab.plot(ltrace, 'r-')

    del ltrace[:]

    algo = SGD(f2, net.params.copy(), callback=storer, learning_rate=0.2)
    algo.run(1000)
    pylab.plot(ltrace, 'g-')

    pylab.semilogy()
    pylab.show()
Code example #14
File: functions.py Project: AndreaDellera/Tesi
def create_network(*layers, **options):
    """Build arbitrarily deep networks.

    `layers` should be a list or tuple of integers, that indicate how many
    neurons the layers should have. `bias` and `outputbias` are flags to
    indicate whether the network should have the corresponding biases; both
    default to True.

    To adjust the classes for the layers use the `hiddenclass` and `outclass`
    parameters, which expect a subclass of :class:`NeuronLayer`.

    If the `recurrent` flag is set, a :class:`RecurrentNetwork` will be created,
    otherwise a :class:`FeedForwardNetwork`.

    If the `fast` flag is set, faster arac networks will be used instead of the
    pybrain implementations."""
    # options
    opt = {
        'bias': True,
        'hiddenclass': SigmoidLayer,
        'outclass': LinearLayer,
        'outputbias': True,
        'peepholes': False,
        'recurrent': False,
        'fast': False,
    }
    for key in options:
        if key not in opt:
            raise NetworkError('buildNetwork unknown option: %s' % key)
        opt[key] = options[key]

    if len(layers) < 2:
        raise NetworkError(
            'buildNetwork needs 2 arguments for input and output layers at least.'
        )

    # Bind the right class to the Network name
    network_map = {
        (False, False): FeedForwardNetwork,
        (True, False): RecurrentNetwork,
    }

    # In PyBrain's original buildNetwork these entries map to arac's fast
    # network classes; here they fall back to the pybrain implementations.
    try:
        network_map[(False, True)] = FeedForwardNetwork
        network_map[(True, True)] = RecurrentNetwork
    except NameError:
        if opt['fast']:
            raise NetworkError("No fast networks available.")
    if opt['hiddenclass'].sequential or opt['outclass'].sequential:
        if not opt['recurrent']:
            # CHECKME: a warning here?
            opt['recurrent'] = True

    Network = network_map[opt['recurrent'], opt['fast']]

    n = Network()
    # linear input layer
    n.addInputModule(LinearLayer(layers[0], name='in'))
    # output layer of type 'outclass'
    n.addOutputModule(opt['outclass'](layers[-1], name='out'))

    if opt['bias']:
        # add bias module and connection to out module, if desired
        n.addModule(BiasUnit(name='bias'))

    # arbitrary number of hidden layers of type 'hiddenclass'
    for i, num in enumerate(layers[1:-1]):
        layername = 'hidden%i' % i
        n.addModule(opt['hiddenclass'](num, name=layername))
        if opt['bias'] and i == 0:
            # connect the bias to the first hidden layer and to the output
            n.addConnection(FullConnection(n['bias'], n[layername]))
            n.addConnection(FullConnection(n['bias'], n['out']))

    # Connect in to the first hidden layer and the last hidden layer to out.
    # Note: this variant assumes at least one hidden layer (len(layers) >= 3).
    n.addConnection(FullConnection(n['in'], n['hidden0']))
    n.addConnection(FullConnection(n['hidden%i' % (len(layers) - 3)],
                                   n['out']))

    # recurrent connections
    if opt['recurrent']:
        print "Recurrent network"
        n.addRecurrentConnection(FullConnection(n['hidden0'], n['hidden0']))

    n.sortModules()
    return n
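A usage sketch under the defaults above: a 2-4-1 network with sigmoid hidden units and biases, followed by a single forward pass.

net = create_network(2, 4, 1)
print(net.activate([0.3, -0.7]))  # one linear output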
Code example #15
from pybrain.structure import FullConnection

def buildSomeConnections(modules):
    """ Add a connection from every second to every third module. """
    res = []
    for i in range(len(modules) // 3 - 1):
        res.append(FullConnection(modules[i * 2], modules[i * 3 + 1]))
    return res
Code example #16
File: first.py Project: sara-ahmadzadeh/First-ANN
from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection

# Make a new FFN object:
n = FeedForwardNetwork()

# Constructing the input, output and hidden layers:
inLayer = LinearLayer(3)
hiddenLayer = SigmoidLayer(4)
outLayer = LinearLayer(1)

# Adding layers to the network:
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

# determining how neurons should be connected:
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

# Adding connections to the network
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

# Final step that makes our MLP usable
n.sortModules()

# Create training & validation samples from the time series `ts`
# (presumably loaded earlier in first.py)
training_samples = []
validation_samples = []

for i in range((len(ts) - 4) * 2 // 3):  # integer division: range() needs an int
    training_samples.append(ts[i:i + 4])