def createNetwork():
    # create network and layers
    net = FeedForwardNetwork()
    in_layer = LinearLayer(16)
    hid1_layer = SigmoidLayer(20)
    hid2_layer = SigmoidLayer(20)
    out_layer = SigmoidLayer(2)

    # add layers to network
    net.addInputModule(in_layer)
    net.addModule(hid1_layer)
    net.addModule(hid2_layer)
    net.addOutputModule(out_layer)

    # create connections between layers
    in_to_hid1 = FullConnection(in_layer, hid1_layer)
    hid1_to_hid2 = FullConnection(hid1_layer, hid2_layer)
    hid2_to_out = FullConnection(hid2_layer, out_layer)

    # add connections to network
    net.addConnection(in_to_hid1)
    net.addConnection(hid1_to_hid2)
    net.addConnection(hid2_to_out)

    # sort modules
    net.sortModules()

    return net
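A minimal usage sketch for createNetwork (the imports and the dummy input vector are illustrative assumptions):

from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection

net = createNetwork()
output = net.activate([0.5] * 16)  # 16 inputs in, 2 sigmoid outputs back
print(output)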
Example #2
 def __init__(self, arg):
     self.inputsize = arg[0]
     self.outputsize = arg[-1]
     self.hiden = arg[1:-1]
     self.err = 1
     self.old_err = 1
     b = []
     b.append(self.inputsize)
     b += self.hiden
     b.append(self.outputsize)
     # b now lists the layer sizes in order: input, hidden..., output
     self.net = FeedForwardNetwork()
     self.inputlayer = LinearLayer(self.inputsize, "Input")
     self.net.addInputModule(self.inputlayer)
     self.outputlayer = LinearLayer(self.outputsize, "Output")
     self.net.addOutputModule(self.outputlayer)
     self.hidenlayers = []
     for i in xrange(len(self.hiden)):
         self.hidenlayers.append(SigmoidLayer(self.hiden[i], "hiden%s" % i))
         self.net.addModule(self.hidenlayers[-1])
     self.net.addConnection(
         FullConnection(self.inputlayer, self.outputlayer))
     for i in xrange(len(self.hidenlayers)):
         self.net.addConnection(
             FullConnection(self.inputlayer, self.hidenlayers[i]))
         self.net.addConnection(
             FullConnection(self.hidenlayers[i], self.outputlayer))
     for i in xrange(len(self.hidenlayers)):
         for j in xrange(i + 1, len(self.hidenlayers)):
             self.net.addConnection(
                 FullConnection(self.hidenlayers[i], self.hidenlayers[j]))
             #self.print_conections(self.net)
     self.net.sortModules()
     self.ds = SupervisedDataSet(self.inputsize, self.outputsize)
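The constructor leaves self.net and an empty self.ds ready for training; a hedged sketch of how such a (net, ds) pair is typically driven (the trainer import is the standard PyBrain one, the helper name is illustrative):

from pybrain.supervised.trainers import BackpropTrainer

def train_once(net, ds):
    # one backprop epoch over the dataset; returns the epoch's average error
    trainer = BackpropTrainer(net, ds)
    return trainer.train()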
Example #3
def anntrain(xdata, ydata):
    # one scalar target per sample
    ds = SupervisedDataSet(len(xdata[0]), 1)
    for i, sample in enumerate(xdata):
        ds.addSample(sample, ydata[i])

    # minimal network: linear input -> single sigmoid unit -> linear output
    net = FeedForwardNetwork()
    inp = LinearLayer(len(xdata[0]))
    h1 = SigmoidLayer(1)
    outp = LinearLayer(1)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addOutputModule(outp)

    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, outp))

    net.sortModules()

    trainer = BackpropTrainer(net, ds)
    trainer.trainOnDataset(ds, 40)  # 40 backprop epochs
    trainer.testOnData()
    return net
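A toy call with illustrative data (three features per sample, scalar targets):

xdata = [[0.0, 0.5, 1.0], [1.0, 0.5, 0.0]]
ydata = [0.0, 1.0]
model = anntrain(xdata, ydata)
print(model.activate(xdata[0]))  # the trained network's prediction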
Example #4
def createNN():
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(4, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = ThresholdLayer(3)
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    return nn
Example #5
    def buildLSTMNetwork(self):
        # create network and modules
        net = RecurrentNetwork()
        inp = LinearLayer(self.n_input, name="Input")
        h1 = LSTMLayer(3, name='LSTM')
        h2 = SigmoidLayer(10, name='sigm')
        outp = LinearLayer(self.numActions, name='output')
        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)
        net.addModule(h2)
        # create connections from input
        net.addConnection(FullConnection(inp, h1, name="input_LSTM"))
        net.addConnection(FullConnection(inp, h2, name="input_sigm"))
        # create connections from LSTM
        net.addConnection(FullConnection(h1, h2, name="LSTM_sigm"))

        # add the recurrent connections
        net.addRecurrentConnection(FullConnection(h1, h1, name='LSTM_rec'))
        net.addRecurrentConnection(FullConnection(h2, h1,
                                                  name='sigm_LSTM_rec'))
        # create connections to output
        net.addConnection(FullConnection(h1, outp, name="LSTM_outp"))
        net.addConnection(FullConnection(h2, outp, name="sigm_outp"))

        # finish up
        net.sortModules()
        net.randomize()
        self.printModules(net)
        self.e = [0 for param in range(len(net.params))]
        # for each action, need to accumulate the gradient
        self.accumulated_gradients = [[0 for param in range(len(net.params))]
                                      for i in range(self.numActions)]
        return net
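Because this is a RecurrentNetwork, activations carry state across calls; a sketch of feeding it a sequence (`observations` is an illustrative iterable of n_input-length vectors):

net.reset()  # clear the LSTM's internal state before a new episode
for obs in observations:
    action_values = net.activate(obs)  # one output value per action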
Example #6
 def __init__(self, inputsize, outputsize, hiden=[1]):
     self.inputsize = inputsize
     self.outputsize = outputsize
     self.hiden = hiden
     self.err = 1
     self.old_err = 1
     #print type(self.hiden)
     if type(self.hiden) == str:
         #print "type str"
         self.hiden = self.hiden[1:-1]
         b = self.hiden.split(", ")
         c = []
         for i in b:
             c.append(int(i))
         self.hiden = c[:]
     b = []
     b.append(self.inputsize)
     b += self.hiden
     b.append(self.outputsize)
     # b now lists the layer sizes in order: input, hidden..., output
     self.net = FeedForwardNetwork()
     self.inputlayer = LinearLayer(self.inputsize, "Input")
     self.net.addInputModule(self.inputlayer)
     self.outputlayer = LinearLayer(self.outputsize, "Output")
     self.net.addOutputModule(self.outputlayer)
     self.hidenlayers = []
     for i in xrange(len(self.hiden)):
         self.hidenlayers.append(SigmoidLayer(self.hiden[i], "hiden%s" % i))
         self.net.addModule(self.hidenlayers[-1])
     self.net.addConnection(
         FullConnection(self.inputlayer, self.outputlayer))
     for i in xrange(len(self.hidenlayers)):
         self.net.addConnection(
             FullConnection(self.inputlayer, self.hidenlayers[i]))
         self.net.addConnection(
             FullConnection(self.hidenlayers[i], self.outputlayer))
     for i in xrange(len(self.hidenlayers)):
         for j in xrange(i + 1, len(self.hidenlayers)):
             self.net.addConnection(
                 FullConnection(self.hidenlayers[i], self.hidenlayers[j]))
             #self.print_conections(self.net)
     self.net.sortModules()
     self.ds = SupervisedDataSet(self.inputsize, self.outputsize)
Example #7
    def buildTDnetwork(self):
        # create network and modules
        net = FeedForwardNetwork()
        inp = LinearLayer(self.n_input, name="Input")
        h1 = SigmoidLayer(10, name='sigm')
        outp = LinearLayer(1, name='output')
        # add modules
        net.addOutputModule(outp)
        net.addInputModule(inp)
        net.addModule(h1)
        # create connections from input
        net.addConnection(FullConnection(inp, h1, name="input_sigm"))

        # create connections to output
        net.addConnection(FullConnection(h1, outp, name="sigm_outp"))

        # finish up
        net.sortModules()
        net.randomize()

        return net
Example #8
def custom_build_network(layer_sizes):
    net = FeedForwardNetwork()

    layers = []
    inp = SigmoidLayer(layer_sizes[0], name='visible')
    h1 = SigmoidLayer(layer_sizes[1], name='hidden1')
    h2 = SigmoidLayer(layer_sizes[2], name='hidden2')
    out = SigmoidLayer(layer_sizes[3], name='out')
    bias = BiasUnit(name='bias')

    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    net.addOutputModule(out)
    net.addModule(bias)

    net.addConnection(FullConnection(inp, h1))
    net.addConnection(FullConnection(h1, h2))
    net.addConnection(FullConnection(h2, out))

    net.addConnection(FullConnection(bias, h1))
    net.addConnection(FullConnection(bias, h2))
    net.addConnection(FullConnection(bias, out))

    net.sortModules()
    return net
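Usage sketch with illustrative layer sizes:

net = custom_build_network([784, 300, 100, 10])
print(len(net.params))  # total trainable weights, including the bias fan-out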
Example #9
 def Update(self, hiden, h):
     self.net = FeedForwardNetwork()
     self.inputlayer = LinearLayer(self.inputsize, "Input")
     self.net.addInputModule(self.inputlayer)
     self.outputlayer = LinearLayer(self.outputsize, "Output")
     self.net.addOutputModule(self.outputlayer)
     self.hidenlayers = []
     for i in xrange(len(hiden)):
         self.hidenlayers.append(SigmoidLayer(hiden[i], "hiden%s" % i))
         self.net.addModule(self.hidenlayers[-1])
     self.net.addConnection(
         FullConnection(self.inputlayer, self.outputlayer))
     for i in xrange(len(self.hidenlayers)):
         self.net.addConnection(
             FullConnection(self.inputlayer, self.hidenlayers[i]))
         self.net.addConnection(
             FullConnection(self.hidenlayers[i], self.outputlayer))
     for i in xrange(len(self.hidenlayers)):
         for j in xrange(i + 1, len(self.hidenlayers)):
             if i < h:
                 self.net.addConnection(
                     FullConnection(self.hidenlayers[i],
                                    self.hidenlayers[j]))
             elif i == h:
                 self.net.addConnection(
                     FullConnection(self.hidenlayers[i],
                                    self.hidenlayers[j],
                                    inSliceTo=hiden[i] - 1))
             else:
                 self.net.addConnection(
                     FullConnection(self.hidenlayers[i],
                                    self.hidenlayers[j]))
             #self.print_conections(self.net)
     self.net.sortModules()
     self.hiden = hiden
Example #10
def _buildNetwork(*layers, **options):
    """This is a helper function to create different kinds of networks.

    `layers` is a list of tuples. Each tuple can contain an arbitrary number of
    layers, each being connected to the next one with IdentityConnections. Due 
    to this, all layers have to have the same dimension. We call these tuples
    'parts.'
    
    Afterwards, the last layer of one tuple is connected to the first layer of 
    the following tuple by a FullConnection.
    
    If the keyword argument bias is given, BiasUnits are added additionally with
    every FullConnection. 

    Example:
    
        _buildNetwork(
            (LinearLayer(3),),
            (SigmoidLayer(4), GaussianLayer(4)),
            (SigmoidLayer(3),),
        )
    """
    bias = options.get('bias', False)

    net = FeedForwardNetwork()
    layerParts = iter(layers)
    firstPart = iter(layerParts.next())
    firstLayer = firstPart.next()
    net.addInputModule(firstLayer)

    prevLayer = firstLayer

    for part in chain(firstPart, layerParts):
        new_part = True
        for layer in part:
            net.addModule(layer)
            # Pick class depending on whether we entered a new part
            if new_part:
                ConnectionClass = FullConnection
                if bias:
                    biasUnit = BiasUnit('BiasUnit for %s' % layer.name)
                    net.addModule(biasUnit)
                    net.addConnection(FullConnection(biasUnit, layer))
            else:
                ConnectionClass = IdentityConnection
            new_part = False
            conn = ConnectionClass(prevLayer, layer)
            net.addConnection(conn)
            prevLayer = layer
    # the last layer handled in the loop becomes the output module
    net.addOutputModule(layer)
    net.sortModules()
    return net
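A usage sketch mirroring the docstring's example, with bias units enabled (assumes GaussianLayer is imported alongside the other layer classes):

net = _buildNetwork(
    (LinearLayer(3),),
    (SigmoidLayer(4), GaussianLayer(4)),  # joined internally by an IdentityConnection
    (SigmoidLayer(3),),
    bias=True,
)
print(net.activate([1, 2, 3]))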
Example #11
def build_pybrain_flat_network(data, pathways, layers=2, second_hidden=5):
    #data is a data frame straight from affy - columns are patients, rows are entrez genes
    #pathways is a dict with {pathway1:[gene1, gene2, ...], pathway2:[gene, gene...]...}

    in_data = data.values.T

    fnn = pybrain.structure.networks.FeedForwardNetwork()

    inLayer = pybrain.structure.LinearLayer(in_data.shape[1])
    fnn.addInputModule(inLayer)

    outLayer = pybrain.structure.SoftmaxLayer(2)
    fnn.addOutputModule(outLayer)

    hidden_list = []
    #right now I have two sigmoid hidden layers
    #can and will probably change
    for i in range(layers):
        if i == 0:
            hidden_list.append(pybrain.structure.SigmoidLayer(len(pathways)))
            fnn.addModule(hidden_list[i])
        else:
            hidden_list.append(pybrain.structure.SigmoidLayer(second_hidden))
            fnn.addModule(hidden_list[i])

    ## add connections: input-to-hidden is sparse, but hidden-to-hidden and hidden-to-output are fully connected

    #add fully connected layers
    hidden_to_out = pybrain.structure.connections.FullConnection(
        hidden_list[-1], outLayer)
    fnn.addConnection(hidden_to_out)

    #     hidden_connects=[]
    for hl in range(1, layers):
        #this first step may be unnecessary, but I am saving the connection objects to a list in case I need them later
        #         hidden_connects.append(FullConnection(hidden_list[hl-1],hidden_list[hl]))
        fnn.addConnection(FullConnection(hidden_list[hl - 1], hidden_list[hl]))

    #now the tricky layer from the pathways database
    fnn = add_pathway_connections(fnn, inLayer, hidden_list[0], data, pathways)

    fnn.sortModules()
    return fnn
Example #12
def build_pybrain_deep_network(data, pathways, filters=5, third_layer_nodes=5):
    #data is a data frame straight from affy - columns are patients, rows are entrez genes
    #pathways is a dict with {pathway1:[gene1, gene2, ...], pathway2:[gene, gene...]...}

    layers = filters

    in_data = data.values.T

    fnn = pybrain.structure.networks.FeedForwardNetwork()

    inLayer = pybrain.structure.LinearLayer(in_data.shape[1])
    fnn.addInputModule(inLayer)

    outLayer = pybrain.structure.SoftmaxLayer(2)
    fnn.addOutputModule(outLayer)

    hidden_list = []
    #one sigmoid hidden layer per filter
    #can and will probably change
    for i in range(layers):
        hidden_list.append(pybrain.structure.SigmoidLayer(len(pathways)))
        fnn.addModule(hidden_list[i])

    clean_up_layer = pybrain.structure.SigmoidLayer(third_layer_nodes)
    fnn.addModule(clean_up_layer)

    ## add connections: input-to-hidden is sparse, but the clean-up layer and output are fully connected

    #add fully connected layers
    hidden_to_out = pybrain.structure.connections.FullConnection(
        clean_up_layer, outLayer)
    fnn.addConnection(hidden_to_out)

    #     hidden_connects=[]
    for i in range(layers):
        fnn.addConnection(FullConnection(hidden_list[i], clean_up_layer))

        #now the tricky layer from the pathways database
        fnn = add_pathway_connections(fnn, inLayer, hidden_list[i], data,
                                      pathways)

    fnn.sortModules()
    return fnn
Example #13
    def build_network(self):
        network_options = {
            'hiddenclass': self.HIDDEN_TYPE,
            'outclass': self.OUTPUT_TYPE,
            'recurrent': True,
            'bias': True
        }
        layout = tuple([self.NUM_POS] + self.HIDDEN_LIST + [self.NUM_OUTPUTS])
        network = buildNetwork(*layout, **network_options)

        # these are the default "module" names
        # NOTE: you DO have to add the hidden->hidden connection explicitly even
        #   when recurrent=True, because otherwise the network cannot know which
        #   particular recurrent connection you wanted
        h = network['hidden0']
        #o = network['out']
        network.addRecurrentConnection(FullConnection(h, h))
        #network.addRecurrentConnection(FullConnection(o, h))
        network.sortModules()
        return network
Example #14
    def build_network(self):
        network = buildNetwork(self.NUM_POS,
                               self.HIDDEN_SIZE,
                               self.NUM_OUTPUTS,
                               bias=True,
                               hiddenclass=LSTMLayer,
                               outclass=TanhLayer,
                               recurrent=True)

        # these are the default "module" names
        # NOTE: you DO have to add the hidden->hidden connection explicitly even
        #   when recurrent=True, because otherwise the network cannot know which
        #   particular recurrent connection you wanted
        h = network['hidden0']
        o = network['out']

        network.addRecurrentConnection(FullConnection(o, h))

        # gets added automatically when connecting o->h for some reason
        #network.addRecurrentConnection(FullConnection(h, h))

        network.sortModules()  # must re-sort after adding new connection
        return network
Example #15
def buildNetwork(*layers, **options):
    """Build arbitrary deep networks.
    
    `layers` should be a list or tuple of integers that indicate how many
    neurons the layers should have. `bias` and `outputbias` are flags to
    indicate whether the network should have the corresponding biases; both
    default to True.
        
    To adjust the classes for the layers use the `hiddenclass` and  `outclass`
    parameters, which expect a subclass of NeuronLayer.
    
    If the `recurrent` flag is set, a RecurrentNetwork will be created, 
    otherwise a FeedForwardNetwork.
    
    If the `fast` flag is set, faster arac networks will be used instead of the 
    pybrain implementations."""
    # options
    opt = {
        'bias': True,
        'hiddenclass': SigmoidLayer,
        'outclass': LinearLayer,
        'outputbias': True,
        'peepholes': False,
        'recurrent': False,
        'fast': False,
    }
    for key in options:
        if key not in opt:
            raise NetworkError('buildNetwork unknown option: %s' % key)
        opt[key] = options[key]

    if len(layers) < 2:
        raise NetworkError(
            'buildNetwork needs 2 arguments for input and output layers at least.'
        )

    # Bind the right class to the Network name
    network_map = {
        (False, False): FeedForwardNetwork,
        (True, False): RecurrentNetwork,
    }
    try:
        network_map[(False, True)] = _FeedForwardNetwork
        network_map[(True, True)] = _RecurrentNetwork
    except NameError:
        if opt['fast']:
            raise NetworkError("No fast networks available.")
    if opt['hiddenclass'].sequential or opt['outclass'].sequential:
        if not opt['recurrent']:
            # CHECKME: a warning here?
            opt['recurrent'] = True
    Network = network_map[opt['recurrent'], opt['fast']]
    n = Network()
    # linear input layer
    n.addInputModule(LinearLayer(layers[0], name='in'))
    # output layer of type 'outclass'
    n.addOutputModule(opt['outclass'](layers[-1], name='out'))
    if opt['bias']:
        # add bias module and connection to out module, if desired
        n.addModule(BiasUnit(name='bias'))
        if opt['outputbias']:
            n.addConnection(FullConnection(n['bias'], n['out']))
    # arbitrary number of hidden layers of type 'hiddenclass'
    for i, num in enumerate(layers[1:-1]):
        layername = 'hidden%i' % i
        n.addModule(opt['hiddenclass'](num, name=layername))
        if opt['bias']:
            # also connect all the layers with the bias
            n.addConnection(FullConnection(n['bias'], n[layername]))
    # connections between hidden layers
    for i in range(len(layers) - 3):
        n.addConnection(
            FullConnection(n['hidden%i' % i], n['hidden%i' % (i + 1)]))
    # other connections
    if len(layers) == 2:
        # flat network, connection from in to out
        n.addConnection(FullConnection(n['in'], n['out']))
    else:
        # network with hidden layer(s), connections from in to first hidden and last hidden to out
        n.addConnection(FullConnection(n['in'], n['hidden0']))
        n.addConnection(
            FullConnection(n['hidden%i' % (len(layers) - 3)], n['out']))

    # recurrent connections
    if issubclass(opt['hiddenclass'], LSTMLayer):
        if len(layers) > 3:
            errorexit(
                "LSTM networks with > 1 hidden layers are not supported!")
        n.addRecurrentConnection(FullConnection(n['hidden0'], n['hidden0']))

    n.sortModules()
    return n
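Typical calls (sizes illustrative); note that passing a sequential hiddenclass such as LSTMLayer switches the result to a RecurrentNetwork:

n = buildNetwork(2, 3, 1)  # 2-3-1 network with bias units
print(n.activate([1, 0]))

r = buildNetwork(2, 3, 1, hiddenclass=LSTMLayer, recurrent=True)
r.reset()  # recurrent nets keep state between activations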
Example #16
 def __init__(self, *args, **kwargs):
     FullConnection.__init__(self, *args, **kwargs)
     assert self.indim == self.outdim, \
         "Indim (%i) does not equal outdim (%i)" % (
         self.indim, self.outdim)
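For context, a sketch of how this guard reads as a complete subclass (the class name is hypothetical), plus a call that satisfies the assertion:

class SquareFullConnection(FullConnection):
    # a FullConnection restricted to equal input and output dimensions
    def __init__(self, *args, **kwargs):
        FullConnection.__init__(self, *args, **kwargs)
        assert self.indim == self.outdim, \
            "Indim (%i) does not equal outdim (%i)" % (self.indim, self.outdim)

conn = SquareFullConnection(LinearLayer(5), LinearLayer(5))  # 5 -> 3 would raise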
Example #17
 def __init__(self, *args, **kwargs):
     FullConnection.__init__(self, *args, **kwargs)
     assert self.indim == self.outdim, \
         "Indim (%i) does not equal outdim (%i)" % (
         self.indim, self.outdim)
Example #18
def main():
    results = []
    args = parse_args()

    # there's a bug in the _sparseness method in sklearn's nmf module that is
    # hit in some edge cases.  The value it computes isn't actually needed in
    # this case, so we can just ignore this divide by 0 error
    np.seterr(invalid="ignore")

    mtx = np.loadtxt(args.data_file, delimiter=',', skiprows=1)
    clabels = np.loadtxt(args.class_file, delimiter=',')

    print("Matrix is %d by %d and %f sparse" %
          (len(mtx), len(mtx[0]), Matrix.get_sparsity(mtx)))
    #print("clabels is %d by %d and %f sparse" % (len(clabels), len(clabels[0]), Matrix.get_sparsity(clabels)))
    #mtx = np.matrix.transpose(mtx)  # transpose to put samples into columns, genes into rows

    # create random class labels, replace with result of NMF
    #clabels = np.zeros(len(mtx))
    #for i in range(len(mtx)):
    # clabels[i] = random.randint(0, 3)
    clabels = np.matrix.transpose(clabels)

    print '-----------Logistic Regression-----------'
    t_lacc = 0
    for i in range(10):
        t_lacc = t_lacc + logistic_regression(mtx, clabels, True)

    print 'avg accuracy of logistic regression (%) ', (t_lacc * 10)

    print '-----------ANN Computation----------'
    # prepare dataset for ANN
    ds = ClassificationDataSet(len(mtx[0]), 1,
                               nb_classes=5)  # replace with result of NMF
    for k in xrange(len(mtx)):
        ds.addSample(np.ravel(mtx[k]), clabels[k])

    # 10-fold cv
    t_error = 0
    t_acc = 0
    for i in range(10):
        # divide the data into training and test sets

        tstdata_temp, trndata_temp = ds.splitWithProportion(0.10)

        tstdata = ClassificationDataSet(len(mtx[0]), 1, nb_classes=5)
        for n in xrange(0, tstdata_temp.getLength()):
            tstdata.addSample(
                tstdata_temp.getSample(n)[0],
                tstdata_temp.getSample(n)[1])

        trndata = ClassificationDataSet(len(mtx[0]), 1, nb_classes=5)
        for n in xrange(0, trndata_temp.getLength()):
            trndata.addSample(
                trndata_temp.getSample(n)[0],
                trndata_temp.getSample(n)[1])

        trndata._convertToOneOfMany()
        tstdata._convertToOneOfMany()

        fnn = FeedForwardNetwork()
        inp = LinearLayer(trndata.indim)
        h1 = SigmoidLayer(10)
        h2 = TanhLayer(10)
        h3 = TanhLayer(10)
        h4 = TanhLayer(10)
        h5 = TanhLayer(10)
        outp = LinearLayer(trndata.outdim)
        #fnn = buildNetwork( trndata.indim, 10 , trndata.outdim, outclass=SoftmaxLayer )

        # add modules
        fnn.addOutputModule(outp)
        fnn.addInputModule(inp)
        fnn.addModule(h1)
        fnn.addModule(h2)
        fnn.addModule(h3)
        fnn.addModule(h4)
        fnn.addModule(h5)
        # create connections
        fnn.addConnection(FullConnection(inp, h1))
        fnn.addConnection(FullConnection(inp, h2))
        fnn.addConnection(FullConnection(inp, h3))
        fnn.addConnection(FullConnection(inp, h4))
        fnn.addConnection(FullConnection(inp, h5))
        fnn.addConnection(FullConnection(h1, h2))
        fnn.addConnection(FullConnection(h2, h3))
        fnn.addConnection(FullConnection(h3, h4))
        fnn.addConnection(FullConnection(h4, h5))

        fnn.addConnection(FullConnection(h5, outp))

        fnn.sortModules()

        trainer = BackpropTrainer(fnn,
                                  dataset=trndata,
                                  momentum=0.1,
                                  learningrate=0.01,
                                  verbose=True,
                                  weightdecay=0.01)

        #trainer.trainUntilConvergence()
        trainer.trainEpochs(5)

        t_error = t_error + percentError(
            trainer.testOnClassData(dataset=tstdata), tstdata['class'])

    print 'avg error ', (t_error / 10)
    print 'avg acc ', (100 - (t_error / 10))
Example #19
print sds['target']

# makes it so there are the same number of output neurons as classes
# (no longer does anything)
#sds._convertToOneOfMany()
#print 'converted:'
#print sds['target'] # now it's a (2 by n array)

# bias adds a "biasModule" on all the hidden layers
#   and if outputbias is True too, then also on the output layer
recursive_network = buildNetwork(3, 30, 2,
                         hiddenclass=LSTMLayer, outclass=TanhLayer, recurrent=True)


# does this help, and why?
recCon = FullConnection(recursive_network['out'], recursive_network['hidden0'])
recursive_network.addRecurrentConnection(recCon)

# must re-sort after adding another connection
recursive_network.sortModules()

print "------Before Training:"

def test_on_sentence(the_sentence):
    recursive_network.reset()
    for i, word in enumerate(the_sentence):
        if i < len(the_sentence)-1:
            recursive_network.activate(word)
        else:
            print recursive_network.activate(word)
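A toy call (the network above takes 3 inputs, so each "word" is a 3-vector; the data is illustrative):

sentence = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
test_on_sentence(sentence)  # prints the activation on the last word only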
Example #20
# create network and layers
net = FeedForwardNetwork()
in_layer = LinearLayer(16)
hid1_layer = SigmoidLayer(20)
hid2_layer = SigmoidLayer(20)
out_layer = SigmoidLayer(2)

# add layers to network
net.addInputModule(in_layer)
net.addModule(hid1_layer)
net.addModule(hid2_layer)
net.addOutputModule(out_layer)

# create connections between layers
in_to_hid1 = FullConnection(in_layer, hid1_layer)
hid1_to_hid2 = FullConnection(hid1_layer, hid2_layer)
hid2_to_out = FullConnection(hid2_layer, out_layer)

# add connections to network
net.addConnection(in_to_hid1)
net.addConnection(hid1_to_hid2)
net.addConnection(hid2_to_out)

# sort modules
net.sortModules()

# set up game
game = Game(None, 4)
generation_number = 1
amount_of_generations = 0
Example #21
    def __init__(self, R, P, T):
        """
		FeedForwardNetworks are networks that do not work for sequential data. 
		Every input is treated as independent of any previous or following inputs.

		"""

        self._ffn = FeedForwardNetwork()
        """
			Input layer:
		    R_iP_j (region i, product j) at times (t-T, .., t-1)

		    T - time interval
		    R - number of regions
		    P - number of products

		    RPT - dimensionality of input layer

		    ***
		    input SORTED in ORDER RPT
		    ***
		"""

        dim = T * R * P

        inputL = LinearLayer(dim, name="input layer")
        """
			Layer 1:
		    groups of neurons for R_iP_j
		    k_1*R*P
		"""

        k1 = T / 25  # integer division (Python 2): assumes T >= 25
        h1 = k1 * R * P
        # weighted average
        hiddenL_1 = LinearLayer(h1, name="hidden layer 1 - R_iP_j")

        k2 = k1 / 2
        h2 = k2 * (R + P)
        hiddenL_2 = TanhLayer(h2, name="hidden layer 2 - R_i, P_j")

        h3 = 2 * h2
        hiddenL_3 = TanhLayer(h3, name="hidden layer 3 - random nodes")

        outputL = LinearLayer(R * P, "output layer")
        """
			add layers to network
		"""
        self._ffn.addInputModule(inputL)
        self._ffn.addOutputModule(outputL)

        self._ffn.addModule(hiddenL_1)
        self._ffn.addModule(hiddenL_2)
        self._ffn.addModule(hiddenL_3)
        """
			create connections between layers
		"""

        # INPUT => 1ST HIDDEN LAYER

        # T*k1 weights per slice
        # mother connection to hold shared weights
        mc1 = MotherConnection(T * k1, name="sharedConnection")

        # keep slice indices to check
        inSlices = dict()
        outSlices = dict()

        # keep slices to check
        inputSlices = dict()
        h1Slices = dict()

        # keep connections to check
        sharedConn = dict()

        for i in range(R * P):
            outSlices[i] = (i * T, (i + 1) * T - 1)
            inSlices[i] = (i * k1, (i + 1) * k1 - 1)

            print outSlices[i], inSlices[i]

            inputSlices[i] = ModuleSlice(inputL,
                                         inSliceFrom=outSlices[i][0],
                                         inSliceTo=outSlices[i][1],
                                         outSliceFrom=outSlices[i][0],
                                         outSliceTo=outSlices[i][1])
            print inputSlices[i]
            h1Slices[i] = ModuleSlice(hiddenL_1,
                                      inSliceFrom=inSlices[i][0],
                                      inSliceTo=inSlices[i][1],
                                      outSliceFrom=inSlices[i][0],
                                      outSliceTo=inSlices[i][1])
            print h1Slices[i]

            sharedConn[i] = SharedFullConnection(mc1, inputSlices[i],
                                                 h1Slices[i])
            #print sharedConn[i].params

        for conn in sharedConn.itervalues():
            #print conn
            #print conn.params
            self._ffn.addConnection(conn)

        # 1ST HIDDEN LAYER => 2ND HIDDEN LAYER
        h2_inIndices = dict()
        h2_inSlices = dict()
        for i in range(R + P):
            h2_inIndices[i] = (k2 * i, k2 * (i + 1) - 1)
            print h2_inIndices[i]
            # no outSlices for h2 since it will be fully connected to h3
            h2_inSlices[i] = ModuleSlice(
                hiddenL_2,
                inSliceFrom=h2_inIndices[i][0],
                inSliceTo=h2_inIndices[i][1]
            )  #, outSliceFrom=h2_inIndices[i][0], outSliceTo=h2_inIndices[i][1])

        # link each R_iP_j h1Slice with R_i and P_j h2_inSlices respectively
        h1h2Conn = dict()
        # there are R*P h1 slices, take every P slices and link them to P_i
        rj = 0
        pj = R - 1
        for i in range(R * P):
            #print "before",rj, pj,i
            if i != 0 and i % P == 0:
                rj = rj + 1
                pj = R
            else:
                pj = pj + 1

            #print rj, pj

            print h1Slices[i], h2_inSlices[rj]

            h1h2Conn[i] = FullConnection(h1Slices[i],
                                         h2_inSlices[rj],
                                         name="h1_h2_" + str(i))
            print h1Slices[i], h2_inSlices[pj]
            h1h2Conn[R * P + i] = FullConnection(h1Slices[i],
                                                 h2_inSlices[pj],
                                                 name="h1_h2_" +
                                                 str(R * P + i))

        for conn in h1h2Conn.itervalues():
            print conn
            print conn.params
            self._ffn.addConnection(conn)
        """
			CAREFUL: for test numbers only 3 params for each pair of connected slices although it should be 4*2=8??
			
		# full connection between Region and State layer and random hidden layer
		self._ffn.addConnection(FullConnection(hiddenL_2, hiddenL_3))

		# full connection from random to output layer
		self._ffn.addConnection(FullConnection(hiddenL_3, outputL))

		"""
        self._ffn.sortModules()
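A standalone miniature of the weight-sharing pattern above, assuming pybrain's shared-connection classes live in pybrain.structure.connections.shared (layer sizes are illustrative):

from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection
from pybrain.structure.connections.shared import MotherConnection, SharedFullConnection

net = FeedForwardNetwork()
inp = LinearLayer(2)
h1 = SigmoidLayer(3)
h2 = SigmoidLayer(3)
out = LinearLayer(1)
net.addInputModule(inp)
net.addModule(h1)
net.addModule(h2)
net.addOutputModule(out)

# one shared 2x3 weight matrix (6 parameters) drives both input->hidden maps
mother = MotherConnection(6)
net.addConnection(SharedFullConnection(mother, inp, h1))
net.addConnection(SharedFullConnection(mother, inp, h2))
net.addConnection(FullConnection(h1, out))
net.addConnection(FullConnection(h2, out))
net.sortModules()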