Example #1
def buildNet(indim, hidden, outdim=2, temperature=1., recurrent=True):
    """Build a (recurrent) network with a tanh hidden layer and a softmax
    output whose inputs are divided by a fixed, non-trainable temperature."""
    from pybrain import (FullConnection, BiasUnit, TanhLayer, SoftmaxLayer,
                         RecurrentNetwork, LinearLayer, LinearConnection,
                         FeedForwardNetwork, SigmoidLayer)
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(indim, name = 'i'))
    net.addModule(TanhLayer(hidden, name = 'h'))
    net.addModule(BiasUnit('bias'))
    net.addModule(SigmoidLayer(outdim, name = 'unscaled'))
    net.addOutputModule(SoftmaxLayer(outdim, name = 'o'))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['unscaled']))
    net.addConnection(FullConnection(net['h'], net['unscaled']))
    lconn = LinearConnection(net['unscaled'], net['o'])
    lconn._setParameters([1./temperature]*outdim)
    # the temperature weights are fixed: reporting zero parameters hides
    # them from trainers
    lconn.paramdim = 0
    net.addConnection(lconn)
    if recurrent:
        net.addRecurrentConnection(FullConnection(net['h'], net['h']))
    net.sortModules()
    print net
    print 'number of parameters', net.paramdim
    return net
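A quick usage sketch (the dimensions and inputs below are made up for illustration): the higher the temperature, the flatter the softmax output.

net = buildNet(indim=3, hidden=5, temperature=100.)
net.reset()
print net.activate([0.2, -0.1, 0.5])  # high temperature: close to uniform [0.5, 0.5]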
Example #2
def testBank():
    # readData, ModuleWrapper, SGD, vSGDfd and printy are external helpers
    # from the surrounding benchmark code; they are not part of pybrain
    D = readData()
    print len(D), 'samples', D.indim, 'features'
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork, BiasUnit, SigmoidLayer
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addModule(BiasUnit(name='bias'))
    net.addOutputModule(SigmoidLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.addConnection(FullConnection(net['bias'], net['out']))
    net.sortModules()
    p = net.params
    p *= 0.01  # scale the initial weights down, in place
    provider = ModuleWrapper(D, net, shuffling=False)
    algo = SGD(provider, net.params.copy(), learning_rate=5.5e-5)
    # optional: pass callback=printy, or use vSGDfd instead:
    # algo = vSGDfd(provider, net.params.copy())
    printy(algo, force=True)
    for _ in range(5):
        algo.run(len(D))
        printy(algo, force=True)
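A self-contained alternative, with pybrain's own BackpropTrainer swapped in for the external SGD helper (the two-sample toy dataset below is made up for illustration):

from pybrain.datasets import SupervisedDataSet
from pybrain.supervised import BackpropTrainer
from pybrain.structure import SigmoidLayer
from pybrain.tools.shortcuts import buildNetwork

ds = SupervisedDataSet(2, 1)                     # 2 features, 1 binary target
ds.addSample([0.1, 0.9], [1])
ds.addSample([0.8, 0.2], [0])
net = buildNetwork(2, 1, outclass=SigmoidLayer)  # no hidden layer: logistic regression
trainer = BackpropTrainer(net, ds, learningrate=0.01)
for _ in range(100):
    trainer.train()                              # one pass over the dataset per call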
Example #3
def buildSimpleLSTMNetwork(peepholes=False):
    from pybrain import RecurrentNetwork, LinearLayer, LSTMLayer, BiasUnit, FullConnection
    N = RecurrentNetwork('simpleLstmNet')
    i = LinearLayer(1, name='i')
    h = LSTMLayer(1, peepholes=peepholes, name='lstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    N.addConnection(FullConnection(i, h, name='f1'))
    N.addConnection(FullConnection(b, h, name='f2'))
    N.addRecurrentConnection(FullConnection(h, h, name='r1'))
    N.addConnection(FullConnection(h, o, name='f3'))
    N.sortModules()
    return N
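Usage sketch (inputs are made up): a recurrent network keeps state between calls, so reset it before each new sequence and feed one timestep per activate() call.

net = buildSimpleLSTMNetwork()
net.reset()                   # clear the recurrent state
for x in (0.5, -0.2, 0.1):    # one timestep at a time
    out = net.activate([x])
print out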
Example #4
def buildSimpleMDLSTMNetwork(peepholes=False):
    from pybrain import (RecurrentNetwork, LinearLayer, MDLSTMLayer, BiasUnit,
                         FullConnection, IdentityConnection)
    N = RecurrentNetwork('simpleMDLstmNet')
    i = LinearLayer(1, name='i')
    dim = 1
    h = MDLSTMLayer(dim, peepholes=peepholes, name='MDlstm')
    o = LinearLayer(1, name='o')
    b = BiasUnit('bias')
    N.addModule(b)
    N.addOutputModule(o)
    N.addInputModule(i)
    N.addModule(h)
    # the first 4*dim input slots of the MDLSTM layer are its gate/cell inputs
    N.addConnection(FullConnection(i, h, outSliceTo=4 * dim, name='f1'))
    N.addConnection(FullConnection(b, h, outSliceTo=4 * dim, name='f2'))
    N.addRecurrentConnection(
        FullConnection(h, h, inSliceTo=dim, outSliceTo=4 * dim, name='r1'))
    # input slots beyond 4*dim receive the previous cell state, copied
    # unchanged from the output slots beyond dim
    N.addRecurrentConnection(
        IdentityConnection(h, h, inSliceFrom=dim, outSliceFrom=4 * dim,
                           name='rstate'))
    # only the first dim output slots (the cell activations) feed the output
    N.addConnection(FullConnection(h, o, inSliceTo=dim, name='f3'))
    N.sortModules()
    return N
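The slice arithmetic above implies, for dim = 1, five input slots on the MDLSTM layer (4*dim gate/cell inputs plus dim carried state) and two output slots (dim activations plus dim state), which can be checked directly:

net = buildSimpleMDLSTMNetwork()
h = net['MDlstm']
print h.indim, h.outdim   # expected: 5 2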
Example #5
def create_network(*layers, **options):
    """Build arbitrarily deep networks.

    `layers` should be a list or tuple of integers indicating how many
    neurons each layer should have. `bias` and `outputbias` are flags that
    indicate whether the network should have the corresponding biases; both
    default to True.

    To adjust the classes of the layers, use the `hiddenclass` and `outclass`
    parameters, which expect a subclass of :class:`NeuronLayer`.

    If the `recurrent` flag is set, a :class:`RecurrentNetwork` will be created,
    otherwise a :class:`FeedForwardNetwork`.

    If the `fast` flag is set, faster arac networks will be used instead of the
    pybrain implementations."""
    from pybrain import (FeedForwardNetwork, RecurrentNetwork, LinearLayer,
                         SigmoidLayer, BiasUnit, FullConnection)
    # NetworkError (and arac's fast _FeedForwardNetwork/_RecurrentNetwork,
    # if available) are assumed to be defined at module level, as in
    # pybrain.tools.shortcuts
    # options
    opt = {
        'bias': True,
        'hiddenclass': SigmoidLayer,
        'outclass': LinearLayer,
        'outputbias': True,
        'peepholes': False,
        'recurrent': False,
        'fast': False,
    }
    for key in options:
        if key not in opt:
            raise NetworkError('create_network unknown option: %s' % key)
        opt[key] = options[key]

    if len(layers) < 2:
        raise NetworkError(
            'create_network needs at least 2 arguments: input and output layer sizes.'
        )

    # Bind the right network class, preferring arac's fast implementations
    # when they have been imported
    network_map = {
        (False, False): FeedForwardNetwork,
        (True, False): RecurrentNetwork,
    }
    try:
        network_map[(False, True)] = _FeedForwardNetwork
        network_map[(True, True)] = _RecurrentNetwork
    except NameError:
        if opt['fast']:
            raise NetworkError("No fast networks available.")
    if opt['hiddenclass'].sequential or opt['outclass'].sequential:
        if not opt['recurrent']:
            # CHECKME: a warning here?
            opt['recurrent'] = True

    Network = network_map[opt['recurrent'], opt['fast']]

    n = Network()
    # linear input layer
    n.addInputModule(LinearLayer(layers[0], name='in'))
    # output layer of type 'outclass'
    n.addOutputModule(opt['outclass'](layers[-1], name='out'))

    if opt['bias']:
        # add a bias module; connect it to the output only if outputbias is set
        n.addModule(BiasUnit(name='bias'))
        if opt['outputbias']:
            n.addConnection(FullConnection(n['bias'], n['out']))

    # arbitrary number of hidden layers of type 'hiddenclass'
    for i, num in enumerate(layers[1:-1]):
        layername = 'hidden%i' % i
        n.addModule(opt['hiddenclass'](num, name=layername))
        if opt['bias']:
            # connect every hidden layer to the bias
            n.addConnection(FullConnection(n['bias'], n[layername]))

    if len(layers) == 2:
        # no hidden layers: connect input directly to output
        n.addConnection(FullConnection(n['in'], n['out']))
    else:
        # chain: in -> hidden0 -> ... -> last hidden -> out
        n.addConnection(FullConnection(n['in'], n['hidden0']))
        for i in range(len(layers) - 3):
            n.addConnection(FullConnection(n['hidden%i' % i],
                                           n['hidden%i' % (i + 1)]))
        n.addConnection(FullConnection(n['hidden%i' % (len(layers) - 3)],
                                       n['out']))

    # recurrent self-connection on the first hidden layer
    if opt['recurrent'] and len(layers) > 2:
        print "Recurrent network"
        n.addRecurrentConnection(FullConnection(n['hidden0'], n['hidden0']))

    n.sortModules()
    return n
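Usage mirrors pybrain's buildNetwork shortcut; the layer sizes below are arbitrary:

from pybrain import TanhLayer

net = create_network(4, 8, 8, 2, hiddenclass=TanhLayer)
print net.paramdim                  # total number of trainable weights
print net.activate([1, 2, 3, 4])    # forward pass on a dummy input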