Example #1
def crearRN():
    # Create the neural network
    n = FeedForwardNetwork()

    # Declare the input, hidden, and output layers of the network
    inLayer = LinearLayer(4096)
    hiddenLayer = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    # Add the layers to the network
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addOutputModule(outLayer)

    # Declare the connections between the layers
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)

    # Register the connections in the network
    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    # The network is now ready to use
    n.sortModules()

    return n
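As a usage sketch (assuming the standard PyBrain imports used throughout these examples), the returned network can be activated with a 4096-element input vector:

# Hypothetical call; the zero vector stands in for a real 64x64 (4096-pixel) input image.
n = crearRN()
print(n.activate([0.0] * 4096))  # 1-element output array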
Example #2
    def __setUpBrain(self, genome):
        """
		Set up PyBrain's neural network
		
		Args:
		    genome (G1DList): PyEvolve's individual container
		"""
        self.network = FeedForwardNetwork()

        inLayer = TanhLayer(14)
        hiddenLayer = TanhLayer(12)
        hiddenLayer2 = TanhLayer(6)
        outLayer = TanhLayer(2)

        self.network.addInputModule(inLayer)
        self.network.addModule(hiddenLayer)
        self.network.addModule(hiddenLayer2)
        self.network.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
        hidden2_to_out = FullConnection(hiddenLayer2, outLayer)

        self.network.addConnection(in_to_hidden)
        self.network.addConnection(hidden_to_hidden2)
        self.network.addConnection(hidden2_to_out)

        self.network.sortModules()

        new_params = numpy.array(genome.genomeList)
        self.network._setParameters(new_params)
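Example #3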
    def __init__(self, genes=None):
        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(Brain.G_INPUTNODES, name='input')
        hiddenLayer1 = SigmoidLayer(Brain.G_HIDDENNODES_L1, name='hidden1')
        hiddenLayer2 = SigmoidLayer(Brain.G_HIDDENNODES_L2, name='hidden2')
        outLayer = SigmoidLayer(Brain.G_OUTPUTNODES, name='out')
        bias = BiasUnit(name='bias')

        self.net.addInputModule(inLayer)
        self.net.addModule(hiddenLayer1)
        self.net.addModule(hiddenLayer2)
        self.net.addModule(bias)
        self.net.addOutputModule(outLayer)

        in_to_hidden1 = FullConnection(inLayer, hiddenLayer1)
        hidden1_to_hidden2 = FullConnection(hiddenLayer1, hiddenLayer2)
        hidden2_to_out = FullConnection(hiddenLayer2, outLayer)
        bias_to_hidden1 = FullConnection(bias, hiddenLayer1)
        bias_to_hidden2 = FullConnection(bias, hiddenLayer2)
        bias_to_out = FullConnection(bias, outLayer)

        self.net.addConnection(in_to_hidden1)
        self.net.addConnection(hidden1_to_hidden2)
        self.net.addConnection(hidden2_to_out)
        self.net.addConnection(bias_to_hidden1)
        self.net.addConnection(bias_to_hidden2)
        self.net.addConnection(bias_to_out)

        self.net.sortModules()

        if genes is not None:
            self.import_genes(genes)
Example #4
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('successfully converted to fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
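Example #5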
    def __init__(self, genes=None):

        self.net = FeedForwardNetwork()
        self.inLayer = TanhLayer(16)
        self.hiddenLayer = TanhLayer(20)
        self.hiddenLayer2 = TanhLayer(20)
        self.outLayer = SoftmaxLayer(4)

        self.net.addInputModule(self.inLayer)
        self.net.addModule(self.hiddenLayer)
        self.net.addModule(self.hiddenLayer2)
        self.net.addOutputModule(self.outLayer)

        self.in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
        self.hidden1_to_hidden2 = FullConnection(self.hiddenLayer, self.hiddenLayer2)
        self.hidden2_to_out = FullConnection(self.hiddenLayer2, self.outLayer)

        self.net.addConnection(self.in_to_hidden)
        self.net.addConnection(self.hidden1_to_hidden2)
        self.net.addConnection(self.hidden2_to_out)

        self.net.sortModules()

        # Set the params to the provided params
        if genes is not None:
            self.net._setParameters(genes)
Example #6
def create_network():
    # Create the network itself
    network = FeedForwardNetwork()
    # Create layers
    NUMBER_OF_INPUT_BYTES = 1600  # the input is a 40x40-pixel picture
    NUMBER_OF_HIDDEN_NEURONS = 10  # size of the single hidden layer
    NUMBER_OF_OUTPUT_CLASSES = 8  # the output distinguishes 8 classes
    inLayer = LinearLayer(NUMBER_OF_INPUT_BYTES)
    hiddenLayer = SigmoidLayer(NUMBER_OF_HIDDEN_NEURONS)
    outLayer = LinearLayer(NUMBER_OF_OUTPUT_CLASSES)
    # Create connections between layers
    # We create FullConnection - each neuron of one layer is connected to each neuron of other layer
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    # Add layers to our network
    network.addInputModule(inLayer)
    network.addModule(hiddenLayer)
    network.addOutputModule(outLayer)
    # Add connections to network
    network.addConnection(in_to_hidden)
    network.addConnection(hidden_to_out)
    # Sort modules to make multilayer perceptron usable
    network.sortModules()
    # prepare array to activate network
    d_letter_array = read_array("d")
    # activate network
    network.activate(d_letter_array)
    return network
Example #7
def _init_net(params_len, output_layer_num, hidden_size):
    # init and train
    net = FeedForwardNetwork()
    """ Next, we're constructing the input, hidden and output layers. """
    inLayer = LinearLayer(params_len)
    hiddenLayer = SigmoidLayer(hidden_size)
    hiddenLayer1 = SigmoidLayer(hidden_size)
    outLayer = LinearLayer(output_layer_num)
    """ (Note that we could also have used a hidden layer of type TanhLayer, LinearLayer, etc.)
    Let's add them to the network: """
    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)
    """ We still need to explicitly determine how they should be connected. For this we use the most
    common connection type, which produces a full connectivity between two layers (or Modules, in general):
    the 'FullConnection'. """

    in2hidden = FullConnection(inLayer, hiddenLayer)
    hidden2hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden2out = FullConnection(hiddenLayer1, outLayer)

    net.addConnection(in2hidden)
    net.addConnection(hidden2hidden)
    net.addConnection(hidden2out)
    """ All the elements are in place now, so we can do the final step that makes our MLP usable,
    which is to call the 'sortModules()' method. """

    net.sortModules()

    # net = buildNetwork( params_len, hidden_size, 601, bias = True )
    return net
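A quick usage sketch, with argument values invented for illustration; once sortModules() has run, the returned network is ready to activate:

# Hypothetical call: 10 input features, two hidden layers of 5 neurons, 2 outputs.
net = _init_net(params_len=10, output_layer_num=2, hidden_size=5)
print(net.activate([0.0] * 10))  # 2-element output array
Example #8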
def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n],
                            (data_set[i][1], data_set[i][2]))
        except Exception:
            # skip malformed samples
            continue
    print(str(len(ds)) + ' points successfully acquired')

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net,
                              learningrate=0.01,
                              momentum=0.1,
                              verbose=True)

    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)

    return net
Example #9
    def create(number_of_hidden_layers, activation_function, input_length,
               output_length, network_file, classify):
        n = FeedForwardNetwork()
        in_layer = LinearLayer(input_length)
        n.addInputModule(in_layer)

        layer_to_connect_to = in_layer
        for x in range(0, number_of_hidden_layers):
            if activation_function == 'sigmoid':
                hidden_layer = SigmoidLayer(input_length)
            else:
                hidden_layer = TanhLayer(input_length)

            n.addModule(hidden_layer)
            hidden_layer_connection = FullConnection(layer_to_connect_to,
                                                     hidden_layer)
            n.addConnection(hidden_layer_connection)
            layer_to_connect_to = hidden_layer

        if classify:
            out_layer = SoftmaxLayer(output_length)
        else:
            out_layer = LinearLayer(output_length)
        n.addOutputModule(out_layer)

        hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
        n.addConnection(hidden_to_out)
        n.sortModules()
        save_network(n, network_file)
Example #10
def buildNN2HiddenLayer(trnData, netNo):
    from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
    from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
    from pybrain.structure import FullConnection

    n = FeedForwardNetwork()
    inLayer = LinearLayer(trnData.indim)  # Define Layer Types
    if netNo == 1 or netNo == 3:
        hiddenLayer0 = TanhLayer(hiddenLayer0neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer1neurons)  # Sigmoid
    elif netNo == 2:
        hiddenLayer0 = TanhLayer(hiddenLayer1neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer0neurons)  # Sigmoid

    outLayer = SoftmaxLayer(trnData.outdim)  # SoftmaxLayer

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer0)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden0 = FullConnection(inLayer, hiddenLayer0)  # Define connections
    hidden0_to_hidden1 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden1_to_out = FullConnection(hiddenLayer1, outLayer)
    n.addConnection(in_to_hidden0)
    n.addConnection(hidden0_to_hidden1)
    n.addConnection(hidden1_to_out)
    n.sortModules()
    return n
Example #11
    def __init__(self, train_data, hyper, n_targets=None, label_targets=None):
        """
        Parameters
        ----------
        train_data: pandas DataFrame
                    Contains columns for the features and for the target variables. The names
                    of the target variables end with the suffix "_tau".
        hyper:      dictionary
                    Contains the hyperparameters needed to run all the functionality of the
                    model. They are the following:
                    "structure" is a list of integers giving the number of neurons in each
                    hidden layer;
                    "epochs" is an integer capping the number of epochs run in every training
                    session;
                    "learning_rate" is a float giving the learning rate of the gradient descent;
                    "momentum" is a float giving the momentum value for the algorithm;
                    "batch" is a bool: if True, full-batch learning is performed, i.e. the
                    weights are updated using all instances of the training set; otherwise
                    normal online learning is performed.
                    The remaining parameters, which concern cross-validation, are explained in
                    the base class.
        """
        Regression.__init__(self, train_data, hyper, n_targets=n_targets, label_targets=label_targets)

        self.N = FeedForwardNetwork()
        self.structure = [self.n_feature] + hyper['structure'] + [self.n_target]

        self._build_net(self.structure)
        self.res_params = [self.N.params[i] for i in range(len(self.N.params))]

        self.train_fraction = hyper['train_fraction']
        self.seed = hyper['seed']
        self.epochs = hyper['epochs']
        self.learning_rate = hyper['learning_rate']
        self.momentum = hyper['momentum']
        self.batch = bool(hyper['batch'])
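The docstring above implies a hyper dictionary shaped roughly as follows; every value here is invented for illustration:

hyper = {
    'structure': [10, 5],     # two hidden layers, 10 and 5 neurons
    'epochs': 100,            # cap on epochs per training session
    'learning_rate': 0.01,
    'momentum': 0.9,
    'batch': False,           # online learning
    'train_fraction': 0.8,    # cross-validation settings from the base class
    'seed': 42,
}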
Example #12
    def Update(self, hiden, h):
        self.net = FeedForwardNetwork()
        self.inputlayer = LinearLayer(self.inputsize, "Input")
        self.net.addInputModule(self.inputlayer)
        self.outputlayer = LinearLayer(self.outputsize, "Output")
        self.net.addOutputModule(self.outputlayer)
        self.hidenlayers = []
        for i in range(len(hiden)):
            self.hidenlayers.append(SigmoidLayer(hiden[i], "hiden%s" % i))
            self.net.addModule(self.hidenlayers[-1])
        self.net.addConnection(
            FullConnection(self.inputlayer, self.outputlayer))
        for i in range(len(self.hidenlayers)):
            self.net.addConnection(
                FullConnection(self.inputlayer, self.hidenlayers[i]))
            self.net.addConnection(
                FullConnection(self.hidenlayers[i], self.outputlayer))
        for i in range(len(self.hidenlayers)):
            for j in range(i + 1, len(self.hidenlayers)):
                if i < h:
                    self.net.addConnection(
                        FullConnection(self.hidenlayers[i],
                                       self.hidenlayers[j]))
                elif i == h:
                    self.net.addConnection(
                        FullConnection(self.hidenlayers[i],
                                       self.hidenlayers[j],
                                       inSliceTo=hiden[i] - 1))
                else:
                    self.net.addConnection(
                        FullConnection(self.hidenlayers[i],
                                       self.hidenlayers[j]))
                # self.print_conections(self.net)
        self.net.sortModules()
        self.hiden = hiden
Example #13
    def __init__(self, arg):
        self.inputsize = arg[0]
        self.outputsize = arg[-1]
        self.hiden = arg[1:-1]
        self.err = 1
        self.old_err = 1
        b = []
        b.append(self.inputsize)
        b += self.hiden
        b.append(self.outputsize)
        # print(b)  # "%s, %s, %s, hiddenclass=TanhLayer" % (self.inputsize, self.hiden, self.outputsize)
        self.net = FeedForwardNetwork()
        self.inputlayer = LinearLayer(self.inputsize, "Input")
        self.net.addInputModule(self.inputlayer)
        self.outputlayer = LinearLayer(self.outputsize, "Output")
        self.net.addOutputModule(self.outputlayer)
        self.hidenlayers = []
        for i in range(len(self.hiden)):
            self.hidenlayers.append(SigmoidLayer(self.hiden[i], "hiden%s" % i))
            self.net.addModule(self.hidenlayers[-1])
        self.net.addConnection(
            FullConnection(self.inputlayer, self.outputlayer))
        for i in range(len(self.hidenlayers)):
            self.net.addConnection(
                FullConnection(self.inputlayer, self.hidenlayers[i]))
            self.net.addConnection(
                FullConnection(self.hidenlayers[i], self.outputlayer))
        for i in range(len(self.hidenlayers)):
            for j in range(i + 1, len(self.hidenlayers)):
                self.net.addConnection(
                    FullConnection(self.hidenlayers[i], self.hidenlayers[j]))
                # self.print_conections(self.net)
        self.net.sortModules()
        self.ds = SupervisedDataSet(self.inputsize, self.outputsize)
Example #14
def getMultiplayerFeedForwardNetwork(inputLayerLen,
                                     hiddenLayersLenList,
                                     outLayerLen=1):
    #create net
    net = FeedForwardNetwork()
    #create layers
    inLayer = LinearLayer(inputLayerLen, name='inLinearLayer')
    hiddenLayers = [
        SigmoidLayer(n, name='sigmoidLayer' + str(i))
        for i, n in enumerate(hiddenLayersLenList)
    ]
    outLayer = LinearLayer(outLayerLen, name='outLinearLayer')
    #add layers to net
    net.addInputModule(inLayer)
    for l in hiddenLayers:
        net.addModule(l)
    net.addOutputModule(outLayer)
    #create connections
    layers = [inLayer] + hiddenLayers + [outLayer]
    connections = [
        FullConnection(layers[i], layers[i + 1], name='connection' + str(i))
        for i in range(len(layers) - 1)
    ]
    #add connections to net
    for c in connections:
        net.addConnection(c)
    #do some required initialization
    net.sortModules()

    return net
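A hypothetical call for illustration: 3 inputs, sigmoid hidden layers of 5 and 4 neurons, and the default single linear output.

net = getMultiplayerFeedForwardNetwork(3, [5, 4])
print(net.activate([0.1, 0.2, 0.3]))  # 1-element output array
Example #15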
def build_deep_network(linear_dimensions):
    neural_net = FeedForwardNetwork()

    inLayer = LinearLayer(linear_dimensions)
    hiddenLayer_1 = SigmoidLayer(100)
    hiddenLayer_2 = SigmoidLayer(100)
    hiddenLayer_3 = SigmoidLayer(50)
    outLayer = LinearLayer(1)

    neural_net.addInputModule(inLayer)
    neural_net.addModule(hiddenLayer_1)
    neural_net.addModule(hiddenLayer_2)
    neural_net.addModule(hiddenLayer_3)
    neural_net.addOutputModule(outLayer)

    in_to_hidden_1 = FullConnection(inLayer, hiddenLayer_1)
    hidden_1_to_hidden_2 = FullConnection(hiddenLayer_1, hiddenLayer_2)
    hidden_2_to_hidden_3 = FullConnection(hiddenLayer_2, hiddenLayer_3)
    hidden_3_to_output = FullConnection(hiddenLayer_3, outLayer)

    neural_net.addConnection(in_to_hidden_1)
    neural_net.addConnection(hidden_1_to_hidden_2)
    neural_net.addConnection(hidden_2_to_hidden_3)
    neural_net.addConnection(hidden_3_to_output)

    neural_net.sortModules()
    return neural_net
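Example #16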
    def __init__(self):

        self.Q = FeedForwardNetwork()

        # The value function is represented by a neural network
        # Input: S = (angle, angular velocity, position), A = action
        # Output: value
        # 2 hidden layers of 5 neurons each
        # Sigmoid activation function
        inLayer = SigmoidLayer(4, name="Input Layer")
        hiddenLayer1 = SigmoidLayer(5, name="Hidden Layer 1")
        hiddenLayer2 = SigmoidLayer(5, name="Hidden Layer 2")
        outLayer = SigmoidLayer(1, name="Output Layer")

        self.Q.addInputModule(inLayer)
        self.Q.addModule(hiddenLayer1)
        self.Q.addModule(hiddenLayer2)
        self.Q.addOutputModule(outLayer)

        connInToHidden1 = FullConnection(inLayer, hiddenLayer1)
        connHidden1ToHidden2 = FullConnection(hiddenLayer1, hiddenLayer2)
        connHidden2ToOut = FullConnection(hiddenLayer2, outLayer)

        self.Q.addConnection(connInToHidden1)
        self.Q.addConnection(connHidden1ToHidden2)
        self.Q.addConnection(connHidden2ToOut)

        self.Q.sortModules()
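A sketch of querying this value function, assuming an instance named agent and invented state/action values:

# Hypothetical query: state (angle, angular velocity, position) plus an action.
value = agent.Q.activate([0.1, -0.3, 0.5, 1.0])  # 1-element array in (0, 1)
Example #17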
def setupNetwork(numHiddenNodes, numHiddenLayers, numFeatures, numSpeakers):

    nn = FeedForwardNetwork()
    inputLayer = LinearLayer(numFeatures)
    nn.addInputModule(inputLayer)

    hiddenLayers = []
    for x in range(numHiddenLayers):
        hiddenLayer = TanhLayer(numHiddenNodes)
        nn.addModule(hiddenLayer)
        hiddenLayers.append(hiddenLayer)
    outputLayer = SoftmaxLayer(numSpeakers)
    nn.addOutputModule(outputLayer)

    inputConnection = FullConnection(inputLayer, hiddenLayers[0])
    nn.addConnection(inputConnection)

    for x in range(numHiddenLayers - 1):
        connect = FullConnection(hiddenLayers[x], hiddenLayers[x + 1])  # chain each hidden layer to the next
        nn.addConnection(connect)

    outputConnection = FullConnection(hiddenLayers[numHiddenLayers - 1],
                                      outputLayer)
    nn.addConnection(outputConnection)
    nn.sortModules()

    return nn
Example #18
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge
    # so just training for fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()

    exportANN(n)
    draw_connections(n)

    return n
Example #19
def initalize_nn():
    global in_to_hidden
    global hidden_to_hidden2
    global hidden_to_out
    
    # Old code (regression)        
    n = FeedForwardNetwork()
    # n = buildNetwork( 2, 3, data.outdim, outclass=SoftmaxLayer )

    inLayer = LinearLayer(2)
    hiddenLayer = SigmoidLayer(3)
    hiddenLayer2 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer)
    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
        
        
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_hidden2)
    n.addConnection(hidden_to_out)
        
    n.sortModules()
    return n
Example #20
    def buildNN(self, net, functions, inp, out):
        layers = []

        inLayer = self.func[functions[0]](inp)
        layers.append(inLayer)
        outLayer = self.func[functions[-1]](out)

        for neural in range(1, len(net) - 1):
            layers.append(self.func[functions[neural]](1))
        layers.append(outLayer)

        connections, recConnections = self.fillConnections(net, [], [0], layers)
        if len(recConnections) == 0:
            n = FeedForwardNetwork()
        else:
            n = RecurrentNetwork()
        n.addInputModule(inLayer)
        for layer in range(1, len(layers) - 1):
            n.addModule(layers[layer])
        n.addOutputModule(outLayer)

        for con in connections:
            n.addConnection(con)
        for rcon in recConnections:
            n.addRecurrentConnection(rcon)
        n.sortModules()
        return n
Example #21
def constructPerceptron(name, numNeurons):
    """Возвращает необученную сеть
    Аргументы:
    name -- имя сети, строка
    numNeurons -- число нейронов в каждом слое, список из целых чисел
    """
    # Create the network
    net = FeedForwardNetwork(name)
    # Create the layers and add them to the network
    prevLayer = None
    newLayer = None
    for i, val in enumerate(numNeurons):
        # If this is the input layer, it is linear
        if (i == 0):
            newLayer = LinearLayer(val, 'input')
            net.addInputModule(newLayer)
            prevLayer = newLayer
        # If this is the output layer, it is linear too
        elif (i == len(numNeurons) - 1):
            newLayer = LinearLayer(val, 'output')
            net.addOutputModule(newLayer)
        # Otherwise the layer is sigmoid
        else:
            newLayer = SigmoidLayer(val, 'hidden_' + str(i))
            net.addModule(newLayer)
        # For any non-input layer, connect the new layer to the previous one
        if (i > 0):
            conn = FullConnection(prevLayer, newLayer, 'conn_' + str(i))
            net.addConnection(conn)
            prevLayer = newLayer
    # Prepare the network for activation by ordering its internal structure
    net.sortModules()
    # Done
    return net
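A hypothetical call building a 2-3-1 perceptron:

net = constructPerceptron('demo', [2, 3, 1])
print(net.activate([0.5, -0.5]))  # 1-element output array
Example #22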
def buildNonGravityNet(recurrent=False):
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    l1 = LinearLayer(2)
    l2 = LinearLayer(3)
    s1 = SigmoidLayer(2)
    l3 = LinearLayer(1)
    net.addInputModule(l1)
    net.addModule(l2)
    net.addModule(s1)
    net.addOutputModule(l3)
    net.addConnection(IdentityConnection(l1, l2, outSliceFrom=1))
    net.addConnection(IdentityConnection(l1, l2, outSliceTo=2))
    net.addConnection(IdentityConnection(l2, l3, inSliceFrom=2))
    net.addConnection(IdentityConnection(l2, l3, inSliceTo=1))
    net.addConnection(IdentityConnection(l1, s1))
    net.addConnection(IdentityConnection(l2, s1, inSliceFrom=1))
    net.addConnection(IdentityConnection(s1, l3, inSliceFrom=1))
    if recurrent:
        net.addRecurrentConnection(IdentityConnection(s1, l1))
        net.addRecurrentConnection(
            IdentityConnection(l2, l2, inSliceFrom=1, outSliceTo=2))
    net.sortModules()
    return net
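Example #23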
def fit_predict(xTrain, yTrain, xTest, epochs, neurons):

    # Check edge cases
    if (not len(xTrain) == len(yTrain) or len(xTrain) == 0 or len(xTest) == 0
            or epochs <= 0):
        return

    # Randomize the training data (probably not necessary, but PyBrain might
    # not shuffle the data itself, so do it here as a safety check)
    indices = np.arange(len(xTrain))
    np.random.shuffle(indices)

    trainSwapX = [xTrain[x] for x in indices]
    trainSwapY = [yTrain[x] for x in indices]

    supTrain = SupervisedDataSet(len(xTrain[0]), 1)
    for x in range(len(trainSwapX)):
        supTrain.addSample(trainSwapX[x], trainSwapY[x])

    # Construct the feed-forward neural network

    n = FeedForwardNetwork()

    inLayer = LinearLayer(len(xTrain[0]))
    hiddenLayer1 = SigmoidLayer(neurons)
    outLayer = LinearLayer(1)

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    n.addConnection(in_to_hidden)
    n.addConnection(hidden_to_out)

    n.sortModules()

    # Train the neural network on the training partition, validating
    # the training progress on the validation partition

    trainer = BackpropTrainer(n,
                              dataset=supTrain,
                              momentum=0.1,
                              learningrate=0.01,
                              verbose=False,
                              weightdecay=0.01)

    trainer.trainUntilConvergence(dataset=supTrain,
                                  maxEpochs=epochs,
                                  validationProportion=0.30)

    outputs = []
    for x in xTest:
        outputs.append(n.activate(x))

    return outputs
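A toy invocation, just to show the expected shapes (all values invented):

preds = fit_predict(xTrain=[[0, 0], [1, 1]], yTrain=[[0.0], [1.0]],
                    xTest=[[0.5, 0.5]], epochs=10, neurons=4)
print(preds)  # list containing one 1-element output array
Example #24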
def buildNN(indim=4, hiddim=6, outdim=3):
    net = FeedForwardNetwork()
    net.addInputModule(TanhLayer(indim, name='i'))
    net.addModule(TanhLayer(hiddim, name='h'))
    net.addOutputModule(ThresholdLayer(outdim, name='o', threshold=0.5))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.sortModules()
    return net
Example #25
def build_fnn():
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(2)
    hiddenLayer = TanhLayer(50)
    outLayer = SoftmaxLayer(2)
    fnn.addInputModule(inLayer)
    fnn.addModule(hiddenLayer)
    fnn.addOutputModule(outLayer)
    # Connect the layers and sort the modules so the network can be activated.
    fnn.addConnection(FullConnection(inLayer, hiddenLayer))
    fnn.addConnection(FullConnection(hiddenLayer, outLayer))
    fnn.sortModules()
    return fnn
Example #26
    def testMdlstm(self):
        net = FeedForwardNetwork()
        net.addInputModule(LinearLayer(1, name='in'))
        net.addModule(MDLSTMLayer(1, 1, name='hidden'))
        net.addOutputModule(LinearLayer(1, name='out'))
        net.addConnection(FullConnection(net['in'], net['hidden']))
        net.addConnection(FullConnection(net['hidden'], net['out']))
        net.sortModules()
        self.equivalence_feed_forward(net, net.convertToFastNetwork())
Example #27
def main(f_samples):
    f_reading = open(f_samples, 'r')
    global data
    data = []

    for line in f_reading:
        line = line.split()
        data.append( (float(line[0]), float(line[-1])) )

    # Split the samples into five interleaved subsets (every 5th point).
    data_module = lambda x: [data[z] for z in range(len(data)) if z % 5 == x]

    global data1
    data1 = [data_module(0), data_module(1), data_module(2), data_module(3), data_module(4)]

    global data_transformed
    data_transformed = take(data, rate = 60)

    global data_transformed_training
    data_transformed_training = [data_transformed[x]
                                 for x in range(len(data_transformed))
                                 if uniform(0, 1) > 0.3]

    #Learning process-----------------------------------------------------------------

    global net, samples, trainer
    net = FeedForwardNetwork()
    inLayer = LinearLayer(3)
    hiddenLayer0 = SigmoidLayer(1)
    hiddenLayer1 = SigmoidLayer(3)
    outLayer = LinearLayer(1)

    net.addInputModule(inLayer)
#    net.addModule(hiddenLayer0)
#    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

#    net.addConnection(FullConnection(inLayer, hiddenLayer0))
    net.addConnection(FullConnection(inLayer, outLayer))
#    net.addConnection(FullConnection(hiddenLayer0, outLayer))
#    net.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
#    net.addConnection(FullConnection(hiddenLayer1, outLayer))
    net.sortModules()
    print(net)
    ## Net with 3 inputs, 8 hidden neurons in one layer and 8 in another, and 1 out.
    # net = buildNetwork(3, 8, 8, 1)
    ## Set with 3 inputs and one output for each sample
    samples = SupervisedDataSet(3, 1)

    for i in data_transformed_training:
        samples.addSample(i['past'], i['next'] - i['average'])
    trainer = BackpropTrainer(net, samples)

    print('Training')
    trainer.trainUntilConvergence(maxEpochs=10)

    print('Comparing')
    compare_net_samples(net, data_transformed)
    print("Number of samples %d for training." % len(data_transformed_training))
Example #28
    @classmethod
    def fromModules(cls, visible, hidden, bias, con, biascon):
        net = FeedForwardNetwork()
        net.addInputModule(visible)
        net.addModule(bias)
        net.addOutputModule(hidden)
        net.addConnection(con)
        net.addConnection(biascon)
        net.sortModules()
        return cls(net)
Example #29
    def __init__(self, network, camada_entrada, camada_oculta, camada_saida):
        self.network = network
        # The attribute is immediately replaced with a fresh network.
        self.network = FeedForwardNetwork()
        self.camada_entrada = camada_entrada
        self.camada_oculta = camada_oculta
        self.camada_saida = camada_saida
        self.ligacao_entrada_oculta = None
        self.ligacao_oculta_saida = None
        self.defineArquitetura()
Example #30
    def __init__(self, x, y, direction):
        self.age = 0

        # position
        self.x = x
        self.y = y

        # number of fruits peeled
        self.num_peeled = 0
        self.num_eaten = 0
        self.num_moved = 0

        # orientation (0 - 359 degrees)
        self.direction = direction

        # touching anything
        self.touching = None
        self.sees = None

        # hunger sensor
        self.hunger = 2000
        self.avg_hunger = 0

        ###
        # Neural Network
        #
        # Inputs:
        # 1. sees_peeled_orange
        # 2. sees_unpeeled_orange
        # 3. sees_peeled_banana
        # 4. sees_unpeeled_banana
        # 5. sees_animat
        # 6. sees_wall
        # 7. hunger
        # 8. touching_peeled_orange
        # 9. touching_unpeeled_orange
        # 10. touching_peeled_banana
        # 11. touching_unpeeled_banana
        # 12. touching_animat
        # 13. touching_wall
        ###

        self.net = FeedForwardNetwork()
        self.net.addInputModule(LinearLayer(13, name='in'))
        self.net.addModule(SigmoidLayer(14, name='hidden'))
        self.net.addOutputModule(LinearLayer(5, name='out'))
        self.net.addConnection(
            FullConnection(self.net['in'], self.net['hidden']))
        self.net.addConnection(
            FullConnection(self.net['hidden'], self.net['out']))
        self.net.sortModules()

        # thresholds for deciding an action
        self.move_threshold = 0
        self.peel_threshold = 0
        self.eat_threshold = 0
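A hypothetical activation showing how the sensor list above maps onto the network, assuming an instance named animat and invented sensor values:

# 13 inputs, in the order documented above; the hunger reading is scaled down here.
sensors = [0, 1, 0, 0, 0, 0, 0.4, 0, 0, 0, 0, 0, 0]
decisions = animat.net.activate(sensors)  # 5 outputs, compared against the thresholds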